Compare commits

..

15 Commits

Author SHA1 Message Date
Илья Глазунов
31d50c9713 Merge branch 'v0.7.0-update'
Some checks failed
Build and Release / build (x64, ubuntu-latest, linux) (push) Has been skipped
Build and Release / build (arm64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, windows-latest, win32) (push) Has been cancelled
Build and Release / release (push) Has been cancelled
2026-02-16 22:40:20 +03:00
bbad79875c Merge pull request 'enhancment/code-highlighting-in-llm-chat' (#6) from enhancment/code-highlighting-in-llm-chat into v0.7.0-update
Reviewed-on: #6
2026-02-16 19:32:13 +00:00
Илья Глазунов
7f15b65eb1 feat: add light theme support and update theme detection in renderer 2026-02-16 22:29:30 +03:00
Илья Глазунов
d6dbaa3141 feat: add syntax highlighting for code blocks in AssistantView 2026-02-16 22:29:24 +03:00
2ebde60dcd Merge pull request 'Fixing local transcription flow' (#5) from fix/local-transcription-flow into v0.7.0-update
Reviewed-on: #5
2026-02-16 16:56:55 +00:00
Илья Глазунов
0d56e06724 feat: add whisper progress tracking and UI updates for download status 2026-02-16 19:55:39 +03:00
Илья Глазунов
526bc4e877 feat: enhance Whisper worker integration with system Node.js detection 2026-02-16 17:10:57 +03:00
Илья Глазунов
684b61755c feat: implement Whisper worker for isolated audio transcription 2026-02-16 11:38:26 +03:00
Илья Глазунов
1b74968006 Add multilingual support in CustomizeView and update speech configuration handling in gemini 2026-02-15 04:00:09 +03:00
Илья Глазунов
4cf48ee0af Refactor window management and global shortcuts handling 2026-02-15 00:34:37 +03:00
Илья Глазунов
494e692738 Add OpenAI dependency and implement model loading in MainView for OpenAI-compatible API 2026-02-14 23:16:41 +03:00
Илья Глазунов
8b216bbb33 Rename project from "Cheating Daddy" to "Mastermind" across all configurations and components to reflect the new branding. 2026-02-14 20:31:35 +03:00
Илья Глазунов
bd62cf5524 Add OpenAI-compatible API support with configuration management and response handling 2026-02-14 20:18:02 +03:00
Илья Глазунов
bfd76dc0c1 Add logging for transcription handling and disable proactive audio 2026-02-14 04:28:29 +03:00
Илья Глазунов
310b6b3fbd huge refactor 2026-02-14 04:17:46 +03:00
39 changed files with 11659 additions and 10983 deletions

1
.npmrc
View File

@ -1 +0,0 @@
node-linker=hoisted

View File

@ -1,2 +0,0 @@
src/assets
node_modules

View File

@ -1,10 +0,0 @@
{
"semi": true,
"tabWidth": 4,
"printWidth": 150,
"singleQuote": true,
"trailingComma": "es5",
"bracketSpacing": true,
"arrowParens": "avoid",
"endOfLine": "lf"
}

View File

@ -10,16 +10,16 @@ packaging.
Install dependencies and run the development app: Install dependencies and run the development app:
``` ```
1. pnpm install 1. npm install
2. pnpm start 2. npm start
``` ```
## Style ## Style
Run `pnpm prettier --write .` before committing. Prettier uses the settings in Run `npx prettier --write .` before committing. Prettier uses the settings in
`.prettierrc` (four-space indentation, print width 150, semicolons and single `.prettierrc` (four-space indentation, print width 150, semicolons and single
quotes). `src/assets` and `node_modules` are ignored via `.prettierignore`. quotes). `src/assets` and `node_modules` are ignored via `.prettierignore`.
The project does not provide linting; `pnpm run lint` simply prints The project does not provide linting; `npm run lint` simply prints
"No linting configured". "No linting configured".
## Code standards ## Code standards

View File

@ -10,8 +10,6 @@
<true/> <true/>
<key>com.apple.security.cs.disable-library-validation</key> <key>com.apple.security.cs.disable-library-validation</key>
<true/> <true/>
<key>com.apple.security.cs.allow-dyld-environment-variables</key>
<true/>
<key>com.apple.security.device.audio-input</key> <key>com.apple.security.device.audio-input</key>
<true/> <true/>
<key>com.apple.security.device.microphone</key> <key>com.apple.security.device.microphone</key>

View File

@ -1,101 +1,83 @@
const { FusesPlugin } = require('@electron-forge/plugin-fuses'); const { FusesPlugin } = require("@electron-forge/plugin-fuses");
const { FuseV1Options, FuseVersion } = require('@electron/fuses'); const { FuseV1Options, FuseVersion } = require("@electron/fuses");
const path = require('path');
const fs = require('fs');
module.exports = { module.exports = {
packagerConfig: { packagerConfig: {
asar: true, asar: {
extraResource: ['./src/assets/SystemAudioDump'], unpack:
name: 'Mastermind', "**/{onnxruntime-node,onnxruntime-common,@huggingface/transformers,sharp,@img}/**",
icon: 'src/assets/logo',
// Fix executable permissions after packaging
afterCopy: [
(buildPath, electronVersion, platform, arch, callback) => {
if (platform === 'darwin') {
const systemAudioDump = path.join(buildPath, '..', 'Resources', 'SystemAudioDump');
if (fs.existsSync(systemAudioDump)) {
try {
fs.chmodSync(systemAudioDump, 0o755);
console.log('✓ Set executable permissions for SystemAudioDump');
} catch (err) {
console.error('✗ Failed to set permissions:', err.message);
}
} else {
console.warn('SystemAudioDump not found at:', systemAudioDump);
}
}
callback();
},
],
// use `security find-identity -v -p codesigning` to find your identity
// for macos signing
// Disabled for local builds - ad-hoc signing causes issues
// osxSign: {
// identity: '-', // ad-hoc signing (no Apple Developer account needed)
// optionsForFile: (filePath) => {
// return {
// entitlements: 'entitlements.plist',
// };
// },
// },
// notarize is off - requires Apple Developer account
// osxNotarize: {
// appleId: 'your apple id',
// appleIdPassword: 'app specific password',
// teamId: 'your team id',
// },
}, },
rebuildConfig: {}, extraResource: ["./src/assets/SystemAudioDump"],
makers: [ name: "Mastermind",
{ icon: "src/assets/logo",
name: '@electron-forge/maker-squirrel', // use `security find-identity -v -p codesigning` to find your identity
config: { // for macos signing
name: 'mastermind', // also fuck apple
productName: 'Mastermind', // osxSign: {
shortcutName: 'Mastermind', // identity: '<paste your identity here>',
createDesktopShortcut: true, // optionsForFile: (filePath) => {
createStartMenuShortcut: true, // return {
}, // entitlements: 'entitlements.plist',
// };
// },
// },
// notarize if off cuz i ran this for 6 hours and it still didnt finish
// osxNotarize: {
// appleId: 'your apple id',
// appleIdPassword: 'app specific password',
// teamId: 'your team id',
// },
},
rebuildConfig: {
// Ensure onnxruntime-node is rebuilt against Electron's Node.js headers
// so the native binding matches the ABI used in packaged builds.
onlyModules: ["onnxruntime-node", "sharp"],
},
makers: [
{
name: "@electron-forge/maker-squirrel",
config: {
name: "mastermind",
productName: "Mastermind",
shortcutName: "Mastermind",
createDesktopShortcut: true,
createStartMenuShortcut: true,
},
},
{
name: "@electron-forge/maker-dmg",
platforms: ["darwin"],
},
{
name: "@reforged/maker-appimage",
platforms: ["linux"],
config: {
options: {
name: "Mastermind",
productName: "Mastermind",
genericName: "AI Assistant",
description: "AI assistant for interviews and learning",
categories: ["Development", "Education"],
icon: "src/assets/logo.png",
}, },
{ },
name: '@electron-forge/maker-dmg', },
platforms: ['darwin'], ],
config: { plugins: [
name: 'Mastermind', {
format: 'ULFO', name: "@electron-forge/plugin-auto-unpack-natives",
}, config: {},
}, },
{ // Fuses are used to enable/disable various Electron functionality
name: '@reforged/maker-appimage', // at package time, before code signing the application
platforms: ['linux'], new FusesPlugin({
config: { version: FuseVersion.V1,
options: { [FuseV1Options.RunAsNode]: false,
name: 'Mastermind', [FuseV1Options.EnableCookieEncryption]: true,
productName: 'Mastermind', [FuseV1Options.EnableNodeOptionsEnvironmentVariable]: false,
genericName: 'AI Assistant', [FuseV1Options.EnableNodeCliInspectArguments]: false,
description: 'AI assistant for video calls, interviews, presentations, and meetings', [FuseV1Options.EnableEmbeddedAsarIntegrityValidation]: true,
categories: ['Development', 'Education'], [FuseV1Options.OnlyLoadAppFromAsar]: true,
icon: 'src/assets/logo.png', }),
}, ],
},
},
],
plugins: [
{
name: '@electron-forge/plugin-auto-unpack-natives',
config: {},
},
// Fuses are used to enable/disable various Electron functionality
// at package time, before code signing the application
new FusesPlugin({
version: FuseVersion.V1,
[FuseV1Options.RunAsNode]: false,
[FuseV1Options.EnableCookieEncryption]: true,
[FuseV1Options.EnableNodeOptionsEnvironmentVariable]: false,
[FuseV1Options.EnableNodeCliInspectArguments]: false,
[FuseV1Options.EnableEmbeddedAsarIntegrityValidation]: true,
[FuseV1Options.OnlyLoadAppFromAsar]: true,
}),
],
}; };

View File

@ -1,46 +1,55 @@
{ {
"name": "mastermind", "name": "mastermind",
"productName": "mastermind", "productName": "Mastermind",
"version": "0.6.0", "version": "0.7.0",
"description": "Mastermind", "description": "Mastermind AI assistant",
"main": "src/index.js", "main": "src/index.js",
"scripts": { "scripts": {
"start": "electron-forge start", "start": "electron-forge start",
"package": "electron-forge package", "package": "electron-forge package",
"make": "electron-forge make", "make": "electron-forge make",
"publish": "electron-forge publish", "publish": "electron-forge publish",
"lint": "echo \"No linting configured\"" "lint": "echo \"No linting configured\"",
}, "postinstall": "electron-rebuild -f -w onnxruntime-node"
"keywords": [ },
"mastermind", "keywords": [
"mastermind ai", "mastermind",
"mastermind ai assistant", "mastermind ai",
"mastermind ai assistant for interviews", "mastermind ai assistant",
"mastermind ai assistant for interviews" "mastermind ai assistant for interviews",
], "mastermind ai assistant for interviews"
"author": { ],
"name": "ShiftyX1", "author": {
"email": "lead@pyserve.org" "name": "ShiftyX1",
}, "email": "lead@pyserve.org"
"license": "GPL-3.0", },
"dependencies": { "license": "GPL-3.0",
"@google/genai": "^1.35.0", "dependencies": {
"electron-squirrel-startup": "^1.0.1", "@google/genai": "^1.41.0",
"openai": "^6.16.0", "@huggingface/transformers": "^3.8.1",
"ws": "^8.18.0" "electron-squirrel-startup": "^1.0.1",
}, "ollama": "^0.6.3",
"devDependencies": { "openai": "^6.22.0",
"@electron-forge/cli": "^7.11.1", "p-retry": "^4.6.2",
"@electron-forge/maker-deb": "^7.11.1", "ws": "^8.19.0"
"@electron-forge/maker-dmg": "^7.11.1", },
"@electron-forge/maker-rpm": "^7.11.1", "devDependencies": {
"@electron-forge/maker-squirrel": "^7.11.1", "@electron/rebuild": "^3.7.1",
"@electron-forge/maker-zip": "^7.11.1", "@electron-forge/cli": "^7.8.1",
"@electron-forge/plugin-auto-unpack-natives": "^7.11.1", "@electron-forge/maker-deb": "^7.8.1",
"@electron-forge/plugin-fuses": "^7.11.1", "@electron-forge/maker-dmg": "^7.8.1",
"@electron/fuses": "^2.0.0", "@electron-forge/maker-rpm": "^7.8.1",
"@electron/osx-sign": "^2.3.0", "@electron-forge/maker-squirrel": "^7.8.1",
"@reforged/maker-appimage": "^5.1.1", "@electron-forge/maker-zip": "^7.8.1",
"electron": "^39.2.7" "@electron-forge/plugin-auto-unpack-natives": "^7.8.1",
"@electron-forge/plugin-fuses": "^7.8.1",
"@electron/fuses": "^1.8.0",
"@reforged/maker-appimage": "^5.0.0",
"electron": "^30.0.5"
},
"pnpm": {
"overrides": {
"p-retry": "4.6.2"
} }
}
} }

851
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

8
pnpm-workspace.yaml Normal file
View File

@ -0,0 +1,8 @@
onlyBuiltDependencies:
- electron
- electron-winstaller
- fs-xattr
- macos-alias
- onnxruntime-node
- protobufjs
- sharp

View File

@ -86,7 +86,7 @@ function analyzeAudioBuffer(buffer, label = 'Audio') {
// Save audio buffer with metadata for debugging // Save audio buffer with metadata for debugging
function saveDebugAudio(buffer, type, timestamp = Date.now()) { function saveDebugAudio(buffer, type, timestamp = Date.now()) {
const homeDir = require('os').homedir(); const homeDir = require('os').homedir();
const debugDir = path.join(homeDir, 'mastermind-debug'); const debugDir = path.join(homeDir, 'cheating-daddy-debug');
if (!fs.existsSync(debugDir)) { if (!fs.existsSync(debugDir)) {
fs.mkdirSync(debugDir, { recursive: true }); fs.mkdirSync(debugDir, { recursive: true });

View File

@ -3,11 +3,7 @@ import { html, css, LitElement } from '../../assets/lit-core-2.7.4.min.js';
export class AppHeader extends LitElement { export class AppHeader extends LitElement {
static styles = css` static styles = css`
* { * {
font-family: font-family: var(--font);
'Inter',
-apple-system,
BlinkMacSystemFont,
sans-serif;
cursor: default; cursor: default;
user-select: none; user-select: none;
} }
@ -18,14 +14,14 @@ export class AppHeader extends LitElement {
align-items: center; align-items: center;
padding: var(--header-padding); padding: var(--header-padding);
background: var(--header-background); background: var(--header-background);
border-bottom: 1px solid var(--border-color); border-bottom: 1px solid var(--border);
} }
.header-title { .header-title {
flex: 1; flex: 1;
font-size: var(--header-font-size); font-size: var(--header-font-size);
font-weight: 500; font-weight: 500;
color: var(--text-color); color: var(--text-primary);
-webkit-app-region: drag; -webkit-app-region: drag;
} }
@ -43,8 +39,8 @@ export class AppHeader extends LitElement {
.button { .button {
background: transparent; background: transparent;
color: var(--text-color); color: var(--text-primary);
border: 1px solid var(--border-color); border: 1px solid var(--border);
padding: var(--header-button-padding); padding: var(--header-button-padding);
border-radius: 3px; border-radius: 3px;
font-size: var(--header-font-size-small); font-size: var(--header-font-size-small);
@ -77,7 +73,7 @@ export class AppHeader extends LitElement {
.icon-button:hover { .icon-button:hover {
background: var(--hover-background); background: var(--hover-background);
color: var(--text-color); color: var(--text-primary);
} }
:host([isclickthrough]) .button:hover, :host([isclickthrough]) .button:hover,
@ -90,7 +86,7 @@ export class AppHeader extends LitElement {
padding: 2px 6px; padding: 2px 6px;
border-radius: 3px; border-radius: 3px;
font-size: 11px; font-size: 11px;
font-family: 'SF Mono', Monaco, monospace; font-family: var(--font-mono);
} }
.click-through-indicator { .click-through-indicator {
@ -99,7 +95,7 @@ export class AppHeader extends LitElement {
background: var(--key-background); background: var(--key-background);
padding: 2px 6px; padding: 2px 6px;
border-radius: 3px; border-radius: 3px;
font-family: 'SF Mono', Monaco, monospace; font-family: var(--font-mono);
} }
.update-button { .update-button {
@ -124,152 +120,6 @@ export class AppHeader extends LitElement {
.update-button:hover { .update-button:hover {
background: rgba(241, 76, 76, 0.1); background: rgba(241, 76, 76, 0.1);
} }
.status-wrapper {
position: relative;
display: inline-flex;
align-items: center;
}
.status-text {
font-size: var(--header-font-size-small);
color: var(--text-secondary);
max-width: 120px;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
}
.status-text.error {
color: #f14c4c;
}
.status-tooltip {
position: absolute;
top: 100%;
right: 0;
margin-top: 8px;
background: var(--tooltip-bg, #1a1a1a);
color: var(--tooltip-text, #ffffff);
padding: 10px 14px;
border-radius: 6px;
font-size: 12px;
max-width: 300px;
word-wrap: break-word;
white-space: normal;
opacity: 0;
visibility: hidden;
transition:
opacity 0.15s ease,
visibility 0.15s ease;
pointer-events: none;
box-shadow: 0 4px 12px rgba(0, 0, 0, 0.3);
z-index: 1000;
line-height: 1.4;
}
.status-tooltip::before {
content: '';
position: absolute;
bottom: 100%;
right: 16px;
border: 6px solid transparent;
border-bottom-color: var(--tooltip-bg, #1a1a1a);
}
.status-wrapper:hover .status-tooltip {
opacity: 1;
visibility: visible;
}
.status-tooltip .tooltip-label {
font-size: 10px;
text-transform: uppercase;
opacity: 0.6;
margin-bottom: 4px;
}
.status-tooltip .tooltip-content {
color: #f14c4c;
}
.model-info {
display: flex;
gap: 6px;
align-items: center;
}
.model-badge {
font-size: 10px;
color: var(--text-muted);
background: var(--key-background);
padding: 2px 6px;
border-radius: 3px;
font-family: 'SF Mono', Monaco, monospace;
max-width: 100px;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
}
.model-badge-wrapper {
position: relative;
display: inline-flex;
}
.model-badge-wrapper .model-tooltip {
position: absolute;
top: 100%;
right: 0;
margin-top: 8px;
background: var(--tooltip-bg, #1a1a1a);
color: var(--tooltip-text, #ffffff);
padding: 10px 14px;
border-radius: 6px;
font-size: 12px;
white-space: nowrap;
opacity: 0;
visibility: hidden;
transition:
opacity 0.15s ease,
visibility 0.15s ease;
pointer-events: none;
box-shadow: 0 4px 12px rgba(0, 0, 0, 0.3);
z-index: 1000;
}
.model-badge-wrapper .model-tooltip::before {
content: '';
position: absolute;
bottom: 100%;
right: 16px;
border: 6px solid transparent;
border-bottom-color: var(--tooltip-bg, #1a1a1a);
}
.model-badge-wrapper:hover .model-tooltip {
opacity: 1;
visibility: visible;
}
.model-tooltip-row {
display: flex;
justify-content: space-between;
gap: 16px;
margin-bottom: 4px;
}
.model-tooltip-row:last-child {
margin-bottom: 0;
}
.model-tooltip-label {
opacity: 0.7;
}
.model-tooltip-value {
font-family: 'SF Mono', Monaco, monospace;
}
`; `;
static properties = { static properties = {
@ -284,8 +134,6 @@ export class AppHeader extends LitElement {
onHideToggleClick: { type: Function }, onHideToggleClick: { type: Function },
isClickThrough: { type: Boolean, reflect: true }, isClickThrough: { type: Boolean, reflect: true },
updateAvailable: { type: Boolean }, updateAvailable: { type: Boolean },
aiProvider: { type: String },
modelInfo: { type: Object },
}; };
constructor() { constructor() {
@ -302,8 +150,6 @@ export class AppHeader extends LitElement {
this.isClickThrough = false; this.isClickThrough = false;
this.updateAvailable = false; this.updateAvailable = false;
this._timerInterval = null; this._timerInterval = null;
this.aiProvider = 'gemini';
this.modelInfo = { model: '', visionModel: '', whisperModel: '' };
} }
connectedCallback() { connectedCallback() {
@ -314,8 +160,8 @@ export class AppHeader extends LitElement {
async _checkForUpdates() { async _checkForUpdates() {
try { try {
const currentVersion = await mastermind.getVersion(); const currentVersion = await cheatingDaddy.getVersion();
const response = await fetch('https://raw.githubusercontent.com/ShiftyX1/Mastermind/refs/heads/master/package.json'); const response = await fetch('https://raw.githubusercontent.com/sohzm/cheating-daddy/refs/heads/master/package.json');
if (!response.ok) return; if (!response.ok) return;
const remotePackage = await response.json(); const remotePackage = await response.json();
@ -344,7 +190,7 @@ export class AppHeader extends LitElement {
async _openUpdatePage() { async _openUpdatePage() {
const { ipcRenderer } = require('electron'); const { ipcRenderer } = require('electron');
await ipcRenderer.invoke('open-external', 'https://github.com/ShiftyX1/Mastermind'); await ipcRenderer.invoke('open-external', 'https://cheatingdaddy.com');
} }
disconnectedCallback() { disconnectedCallback() {
@ -425,49 +271,8 @@ export class AppHeader extends LitElement {
return navigationViews.includes(this.currentView); return navigationViews.includes(this.currentView);
} }
getProviderDisplayName() {
const names = {
'gemini': 'Gemini',
'openai-realtime': 'OpenAI Realtime',
'openai-sdk': 'OpenAI SDK',
};
return names[this.aiProvider] || this.aiProvider;
}
renderModelInfo() {
// Only show model info for OpenAI SDK provider
if (this.aiProvider !== 'openai-sdk' || !this.modelInfo) {
return '';
}
const { model, visionModel, whisperModel } = this.modelInfo;
// Show a compact badge with tooltip for model details
return html`
<div class="model-badge-wrapper">
<span class="model-badge" title="Models">${model || 'gpt-4o'}</span>
<div class="model-tooltip">
<div class="model-tooltip-row">
<span class="model-tooltip-label">Text</span>
<span class="model-tooltip-value">${model || 'gpt-4o'}</span>
</div>
<div class="model-tooltip-row">
<span class="model-tooltip-label">Vision</span>
<span class="model-tooltip-value">${visionModel || 'gpt-4o'}</span>
</div>
<div class="model-tooltip-row">
<span class="model-tooltip-label">Speech</span>
<span class="model-tooltip-value">${whisperModel || 'whisper-1'}</span>
</div>
</div>
</div>
`;
}
render() { render() {
const elapsedTime = this.getElapsedTime(); const elapsedTime = this.getElapsedTime();
const isError = this.statusText && (this.statusText.toLowerCase().includes('error') || this.statusText.toLowerCase().includes('failed'));
const shortStatus = isError ? 'Error' : this.statusText;
return html` return html`
<div class="header"> <div class="header">
@ -475,63 +280,34 @@ export class AppHeader extends LitElement {
<div class="header-actions"> <div class="header-actions">
${this.currentView === 'assistant' ${this.currentView === 'assistant'
? html` ? html`
${this.renderModelInfo()}
<span>${elapsedTime}</span> <span>${elapsedTime}</span>
<div class="status-wrapper"> <span>${this.statusText}</span>
<span class="status-text ${isError ? 'error' : ''}">${shortStatus}</span>
${isError
? html`
<div class="status-tooltip">
<div class="tooltip-label">Error Details</div>
<div class="tooltip-content">${this.statusText}</div>
</div>
`
: ''}
</div>
${this.isClickThrough ? html`<span class="click-through-indicator">click-through</span>` : ''} ${this.isClickThrough ? html`<span class="click-through-indicator">click-through</span>` : ''}
` `
: ''} : ''}
${this.currentView === 'main' ${this.currentView === 'main'
? html` ? html`
${this.updateAvailable ${this.updateAvailable ? html`
? html` <button class="update-button" @click=${this._openUpdatePage}>
<button class="update-button" @click=${this._openUpdatePage}> <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16" fill="currentColor">
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16" fill="currentColor"> <path fill-rule="evenodd" d="M13.836 2.477a.75.75 0 0 1 .75.75v3.182a.75.75 0 0 1-.75.75h-3.182a.75.75 0 0 1 0-1.5h1.37l-.84-.841a4.5 4.5 0 0 0-7.08.932.75.75 0 0 1-1.3-.75 6 6 0 0 1 9.44-1.242l.842.84V3.227a.75.75 0 0 1 .75-.75Zm-.911 7.5A.75.75 0 0 1 13.199 11a6 6 0 0 1-9.44 1.241l-.84-.84v1.371a.75.75 0 0 1-1.5 0V9.591a.75.75 0 0 1 .75-.75H5.35a.75.75 0 0 1 0 1.5H3.98l.841.841a4.5 4.5 0 0 0 7.08-.932.75.75 0 0 1 1.025-.273Z" clip-rule="evenodd" />
<path </svg>
fill-rule="evenodd" Update available
d="M13.836 2.477a.75.75 0 0 1 .75.75v3.182a.75.75 0 0 1-.75.75h-3.182a.75.75 0 0 1 0-1.5h1.37l-.84-.841a4.5 4.5 0 0 0-7.08.932.75.75 0 0 1-1.3-.75 6 6 0 0 1 9.44-1.242l.842.84V3.227a.75.75 0 0 1 .75-.75Zm-.911 7.5A.75.75 0 0 1 13.199 11a6 6 0 0 1-9.44 1.241l-.84-.84v1.371a.75.75 0 0 1-1.5 0V9.591a.75.75 0 0 1 .75-.75H5.35a.75.75 0 0 1 0 1.5H3.98l.841.841a4.5 4.5 0 0 0 7.08-.932.75.75 0 0 1 1.025-.273Z" </button>
clip-rule="evenodd" ` : ''}
/>
</svg>
Update available
</button>
`
: ''}
<button class="icon-button" @click=${this.onHistoryClick}> <button class="icon-button" @click=${this.onHistoryClick}>
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 20 20" fill="currentColor"> <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 20 20" fill="currentColor">
<path <path fill-rule="evenodd" d="M10 18a8 8 0 1 0 0-16 8 8 0 0 0 0 16Zm.75-13a.75.75 0 0 0-1.5 0v5c0 .414.336.75.75.75h4a.75.75 0 0 0 0-1.5h-3.25V5Z" clip-rule="evenodd" />
fill-rule="evenodd"
d="M10 18a8 8 0 1 0 0-16 8 8 0 0 0 0 16Zm.75-13a.75.75 0 0 0-1.5 0v5c0 .414.336.75.75.75h4a.75.75 0 0 0 0-1.5h-3.25V5Z"
clip-rule="evenodd"
/>
</svg> </svg>
</button> </button>
<button class="icon-button" @click=${this.onCustomizeClick}> <button class="icon-button" @click=${this.onCustomizeClick}>
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 20 20" fill="currentColor"> <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 20 20" fill="currentColor">
<path <path fill-rule="evenodd" d="M7.84 1.804A1 1 0 0 1 8.82 1h2.36a1 1 0 0 1 .98.804l.331 1.652a6.993 6.993 0 0 1 1.929 1.115l1.598-.54a1 1 0 0 1 1.186.447l1.18 2.044a1 1 0 0 1-.205 1.251l-1.267 1.113a7.047 7.047 0 0 1 0 2.228l1.267 1.113a1 1 0 0 1 .206 1.25l-1.18 2.045a1 1 0 0 1-1.187.447l-1.598-.54a6.993 6.993 0 0 1-1.929 1.115l-.33 1.652a1 1 0 0 1-.98.804H8.82a1 1 0 0 1-.98-.804l-.331-1.652a6.993 6.993 0 0 1-1.929-1.115l-1.598.54a1 1 0 0 1-1.186-.447l-1.18-2.044a1 1 0 0 1 .205-1.251l1.267-1.114a7.05 7.05 0 0 1 0-2.227L1.821 7.773a1 1 0 0 1-.206-1.25l1.18-2.045a1 1 0 0 1 1.187-.447l1.598.54A6.992 6.992 0 0 1 7.51 3.456l.33-1.652ZM10 13a3 3 0 1 0 0-6 3 3 0 0 0 0 6Z" clip-rule="evenodd" />
fill-rule="evenodd"
d="M7.84 1.804A1 1 0 0 1 8.82 1h2.36a1 1 0 0 1 .98.804l.331 1.652a6.993 6.993 0 0 1 1.929 1.115l1.598-.54a1 1 0 0 1 1.186.447l1.18 2.044a1 1 0 0 1-.205 1.251l-1.267 1.113a7.047 7.047 0 0 1 0 2.228l1.267 1.113a1 1 0 0 1 .206 1.25l-1.18 2.045a1 1 0 0 1-1.187.447l-1.598-.54a6.993 6.993 0 0 1-1.929 1.115l-.33 1.652a1 1 0 0 1-.98.804H8.82a1 1 0 0 1-.98-.804l-.331-1.652a6.993 6.993 0 0 1-1.929-1.115l-1.598.54a1 1 0 0 1-1.186-.447l-1.18-2.044a1 1 0 0 1 .205-1.251l1.267-1.114a7.05 7.05 0 0 1 0-2.227L1.821 7.773a1 1 0 0 1-.206-1.25l1.18-2.045a1 1 0 0 1 1.187-.447l1.598.54A6.992 6.992 0 0 1 7.51 3.456l.33-1.652ZM10 13a3 3 0 1 0 0-6 3 3 0 0 0 0 6Z"
clip-rule="evenodd"
/>
</svg> </svg>
</button> </button>
<button class="icon-button" @click=${this.onHelpClick}> <button class="icon-button" @click=${this.onHelpClick}>
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 20 20" fill="currentColor"> <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 20 20" fill="currentColor">
<path <path fill-rule="evenodd" d="M18 10a8 8 0 1 1-16 0 8 8 0 0 1 16 0ZM8.94 6.94a.75.75 0 1 1-1.061-1.061 3 3 0 1 1 2.871 5.026v.345a.75.75 0 0 1-1.5 0v-.5c0-.72.57-1.172 1.081-1.287A1.5 1.5 0 1 0 8.94 6.94ZM10 15a1 1 0 1 0 0-2 1 1 0 0 0 0 2Z" clip-rule="evenodd" />
fill-rule="evenodd"
d="M18 10a8 8 0 1 1-16 0 8 8 0 0 1 16 0ZM8.94 6.94a.75.75 0 1 1-1.061-1.061 3 3 0 1 1 2.871 5.026v.345a.75.75 0 0 1-1.5 0v-.5c0-.72.57-1.172 1.081-1.287A1.5 1.5 0 1 0 8.94 6.94ZM10 15a1 1 0 1 0 0-2 1 1 0 0 0 0 2Z"
clip-rule="evenodd"
/>
</svg> </svg>
</button> </button>
` `
@ -539,23 +315,19 @@ export class AppHeader extends LitElement {
${this.currentView === 'assistant' ${this.currentView === 'assistant'
? html` ? html`
<button @click=${this.onHideToggleClick} class="button"> <button @click=${this.onHideToggleClick} class="button">
Hide&nbsp;&nbsp;<span class="key" style="pointer-events: none;">${mastermind.isMacOS ? 'Cmd' : 'Ctrl'}</span Hide&nbsp;&nbsp;<span class="key" style="pointer-events: none;">${cheatingDaddy.isMacOS ? 'Cmd' : 'Ctrl'}</span
>&nbsp;&nbsp;<span class="key">&bsol;</span> >&nbsp;&nbsp;<span class="key">&bsol;</span>
</button> </button>
<button @click=${this.onCloseClick} class="icon-button window-close"> <button @click=${this.onCloseClick} class="icon-button window-close">
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 20 20" fill="currentColor"> <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 20 20" fill="currentColor">
<path <path d="M6.28 5.22a.75.75 0 0 0-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 1 0 1.06 1.06L10 11.06l3.72 3.72a.75.75 0 1 0 1.06-1.06L11.06 10l3.72-3.72a.75.75 0 0 0-1.06-1.06L10 8.94 6.28 5.22Z" />
d="M6.28 5.22a.75.75 0 0 0-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 1 0 1.06 1.06L10 11.06l3.72 3.72a.75.75 0 1 0 1.06-1.06L11.06 10l3.72-3.72a.75.75 0 0 0-1.06-1.06L10 8.94 6.28 5.22Z"
/>
</svg> </svg>
</button> </button>
` `
: html` : html`
<button @click=${this.isNavigationView() ? this.onBackClick : this.onCloseClick} class="icon-button window-close"> <button @click=${this.isNavigationView() ? this.onBackClick : this.onCloseClick} class="icon-button window-close">
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 20 20" fill="currentColor"> <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 20 20" fill="currentColor">
<path <path d="M6.28 5.22a.75.75 0 0 0-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 1 0 1.06 1.06L10 11.06l3.72 3.72a.75.75 0 1 0 1.06-1.06L11.06 10l3.72-3.72a.75.75 0 0 0-1.06-1.06L10 8.94 6.28 5.22Z" />
d="M6.28 5.22a.75.75 0 0 0-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 1 0 1.06 1.06L10 11.06l3.72 3.72a.75.75 0 1 0 1.06-1.06L11.06 10l3.72-3.72a.75.75 0 0 0-1.06-1.06L10 8.94 6.28 5.22Z"
/>
</svg> </svg>
</button> </button>
`} `}

File diff suppressed because it is too large Load Diff

View File

@ -1,640 +0,0 @@
import { html, css, LitElement } from '../../assets/lit-core-2.7.4.min.js';
import { AppHeader } from './AppHeader.js';
import { MainView } from '../views/MainView.js';
import { CustomizeView } from '../views/CustomizeView.js';
import { HelpView } from '../views/HelpView.js';
import { HistoryView } from '../views/HistoryView.js';
import { AssistantView } from '../views/AssistantView.js';
import { OnboardingView } from '../views/OnboardingView.js';
import { ScreenPickerDialog } from '../views/ScreenPickerDialog.js';
export class MastermindApp extends LitElement {
static styles = css`
* {
box-sizing: border-box;
font-family:
'Inter',
-apple-system,
BlinkMacSystemFont,
sans-serif;
margin: 0px;
padding: 0px;
cursor: default;
user-select: none;
}
:host {
display: block;
width: 100%;
height: 100vh;
background-color: var(--background-transparent);
color: var(--text-color);
}
.window-container {
height: 100vh;
overflow: hidden;
background: var(--bg-primary);
}
.container {
display: flex;
flex-direction: column;
height: 100%;
}
.main-content {
flex: 1;
padding: var(--main-content-padding);
overflow-y: auto;
background: var(--main-content-background);
}
.main-content.with-border {
border-top: none;
}
.main-content.assistant-view {
padding: 12px;
}
.main-content.onboarding-view {
padding: 0;
background: transparent;
}
.main-content.settings-view,
.main-content.help-view,
.main-content.history-view {
padding: 0;
}
.view-container {
opacity: 1;
height: 100%;
}
.view-container.entering {
opacity: 0;
}
::-webkit-scrollbar {
width: 8px;
height: 8px;
}
::-webkit-scrollbar-track {
background: transparent;
}
::-webkit-scrollbar-thumb {
background: var(--scrollbar-thumb);
border-radius: 4px;
}
::-webkit-scrollbar-thumb:hover {
background: var(--scrollbar-thumb-hover);
}
`;
static properties = {
currentView: { type: String },
statusText: { type: String },
startTime: { type: Number },
isRecording: { type: Boolean },
sessionActive: { type: Boolean },
selectedProfile: { type: String },
selectedLanguage: { type: String },
responses: { type: Array },
currentResponseIndex: { type: Number },
selectedScreenshotInterval: { type: String },
selectedImageQuality: { type: String },
layoutMode: { type: String },
_viewInstances: { type: Object, state: true },
_isClickThrough: { state: true },
_awaitingNewResponse: { state: true },
shouldAnimateResponse: { type: Boolean },
_storageLoaded: { state: true },
aiProvider: { type: String },
modelInfo: { type: Object },
showScreenPicker: { type: Boolean },
screenSources: { type: Array },
};
constructor() {
super();
// Set defaults - will be overwritten by storage
this.currentView = 'main'; // Will check onboarding after storage loads
this.statusText = '';
this.startTime = null;
this.isRecording = false;
this.sessionActive = false;
this.selectedProfile = 'interview';
this.selectedLanguage = 'en-US';
this.selectedScreenshotInterval = '5';
this.selectedImageQuality = 'medium';
this.layoutMode = 'normal';
this.responses = [];
this.currentResponseIndex = -1;
this._viewInstances = new Map();
this._isClickThrough = false;
this._awaitingNewResponse = false;
this._currentResponseIsComplete = true;
this.shouldAnimateResponse = false;
this._storageLoaded = false;
this.aiProvider = 'gemini';
this.modelInfo = { model: '', visionModel: '', whisperModel: '' };
this.showScreenPicker = false;
this.screenSources = [];
// Load from storage
this._loadFromStorage();
}
async _loadFromStorage() {
try {
const [config, prefs, openaiSdkCreds] = await Promise.all([
mastermind.storage.getConfig(),
mastermind.storage.getPreferences(),
mastermind.storage.getOpenAISDKCredentials(),
]);
// Check onboarding status
this.currentView = config.onboarded ? 'main' : 'onboarding';
// Apply background appearance (color + transparency)
this.applyBackgroundAppearance(prefs.backgroundColor ?? '#1e1e1e', prefs.backgroundTransparency ?? 0.8);
// Load preferences
this.selectedProfile = prefs.selectedProfile || 'interview';
this.selectedLanguage = prefs.selectedLanguage || 'en-US';
this.selectedScreenshotInterval = prefs.selectedScreenshotInterval || '5';
this.selectedImageQuality = prefs.selectedImageQuality || 'medium';
this.layoutMode = config.layout || 'normal';
// Load AI provider and model info
this.aiProvider = prefs.aiProvider || 'gemini';
this.modelInfo = {
model: openaiSdkCreds.model || 'gpt-4o',
visionModel: openaiSdkCreds.visionModel || 'gpt-4o',
whisperModel: openaiSdkCreds.whisperModel || 'whisper-1',
};
this._storageLoaded = true;
this.updateLayoutMode();
this.requestUpdate();
} catch (error) {
console.error('Error loading from storage:', error);
this._storageLoaded = true;
this.requestUpdate();
}
}
hexToRgb(hex) {
const result = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex);
return result
? {
r: parseInt(result[1], 16),
g: parseInt(result[2], 16),
b: parseInt(result[3], 16),
}
: { r: 30, g: 30, b: 30 };
}
lightenColor(rgb, amount) {
return {
r: Math.min(255, rgb.r + amount),
g: Math.min(255, rgb.g + amount),
b: Math.min(255, rgb.b + amount),
};
}
applyBackgroundAppearance(backgroundColor, alpha) {
const root = document.documentElement;
const baseRgb = this.hexToRgb(backgroundColor);
// Generate color variants based on the base color
const secondary = this.lightenColor(baseRgb, 7);
const tertiary = this.lightenColor(baseRgb, 15);
const hover = this.lightenColor(baseRgb, 20);
root.style.setProperty('--header-background', `rgba(${baseRgb.r}, ${baseRgb.g}, ${baseRgb.b}, ${alpha})`);
root.style.setProperty('--main-content-background', `rgba(${baseRgb.r}, ${baseRgb.g}, ${baseRgb.b}, ${alpha})`);
root.style.setProperty('--bg-primary', `rgba(${baseRgb.r}, ${baseRgb.g}, ${baseRgb.b}, ${alpha})`);
root.style.setProperty('--bg-secondary', `rgba(${secondary.r}, ${secondary.g}, ${secondary.b}, ${alpha})`);
root.style.setProperty('--bg-tertiary', `rgba(${tertiary.r}, ${tertiary.g}, ${tertiary.b}, ${alpha})`);
root.style.setProperty('--bg-hover', `rgba(${hover.r}, ${hover.g}, ${hover.b}, ${alpha})`);
root.style.setProperty('--input-background', `rgba(${tertiary.r}, ${tertiary.g}, ${tertiary.b}, ${alpha})`);
root.style.setProperty('--input-focus-background', `rgba(${tertiary.r}, ${tertiary.g}, ${tertiary.b}, ${alpha})`);
root.style.setProperty('--hover-background', `rgba(${hover.r}, ${hover.g}, ${hover.b}, ${alpha})`);
root.style.setProperty('--scrollbar-background', `rgba(${baseRgb.r}, ${baseRgb.g}, ${baseRgb.b}, ${alpha})`);
}
// Keep old function name for backwards compatibility
applyBackgroundTransparency(alpha) {
this.applyBackgroundAppearance('#1e1e1e', alpha);
}
connectedCallback() {
super.connectedCallback();
// Apply layout mode to document root
this.updateLayoutMode();
// Set up IPC listeners if needed
if (window.require) {
const { ipcRenderer } = window.require('electron');
ipcRenderer.on('new-response', (_, response) => {
this.addNewResponse(response);
});
ipcRenderer.on('update-response', (_, response) => {
this.updateCurrentResponse(response);
});
ipcRenderer.on('update-status', (_, status) => {
this.setStatus(status);
});
ipcRenderer.on('click-through-toggled', (_, isEnabled) => {
this._isClickThrough = isEnabled;
});
ipcRenderer.on('reconnect-failed', (_, data) => {
this.addNewResponse(data.message);
});
}
}
disconnectedCallback() {
super.disconnectedCallback();
if (window.require) {
const { ipcRenderer } = window.require('electron');
ipcRenderer.removeAllListeners('new-response');
ipcRenderer.removeAllListeners('update-response');
ipcRenderer.removeAllListeners('update-status');
ipcRenderer.removeAllListeners('click-through-toggled');
ipcRenderer.removeAllListeners('reconnect-failed');
}
}
setStatus(text) {
this.statusText = text;
// Mark response as complete when we get certain status messages
if (text.includes('Ready') || text.includes('Listening') || text.includes('Error')) {
this._currentResponseIsComplete = true;
console.log('[setStatus] Marked current response as complete');
}
}
addNewResponse(response) {
// Add a new response entry (first word of a new AI response)
this.responses = [...this.responses, response];
this.currentResponseIndex = this.responses.length - 1;
this._awaitingNewResponse = false;
console.log('[addNewResponse] Added:', response);
this.requestUpdate();
}
updateCurrentResponse(response) {
// Update the current response in place (streaming subsequent words)
if (this.responses.length > 0) {
this.responses = [...this.responses.slice(0, -1), response];
console.log('[updateCurrentResponse] Updated to:', response);
} else {
// Fallback: if no responses exist, add as new
this.addNewResponse(response);
}
this.requestUpdate();
}
// Header event handlers
// Each handler below switches the active view; the updated() lifecycle
// hook then notifies the main process and plays the enter transition.
handleCustomizeClick() {
this.currentView = 'customize';
this.requestUpdate();
}
handleHelpClick() {
this.currentView = 'help';
this.requestUpdate();
}
handleHistoryClick() {
this.currentView = 'history';
this.requestUpdate();
}
async handleClose() {
if (this.currentView === 'customize' || this.currentView === 'help' || this.currentView === 'history') {
this.currentView = 'main';
} else if (this.currentView === 'assistant') {
mastermind.stopCapture();
// Close the session
if (window.require) {
const { ipcRenderer } = window.require('electron');
await ipcRenderer.invoke('close-session');
}
this.sessionActive = false;
this.currentView = 'main';
console.log('Session closed');
} else {
// Quit the entire application
if (window.require) {
const { ipcRenderer } = window.require('electron');
await ipcRenderer.invoke('quit-application');
}
}
}
async handleHideToggle() {
// Toggle overlay window visibility via the main process (Electron only).
if (window.require) {
const { ipcRenderer } = window.require('electron');
await ipcRenderer.invoke('toggle-window-visibility');
}
}
// Main view event handlers
async handleStart() {
// check if api key is empty do nothing
const apiKey = await mastermind.storage.getApiKey();
if (!apiKey || apiKey === '') {
// Trigger the red blink animation on the API key input
const mainView = this.shadowRoot.querySelector('main-view');
if (mainView && mainView.triggerApiKeyError) {
mainView.triggerApiKeyError();
}
return;
}
await mastermind.initializeGemini(this.selectedProfile, this.selectedLanguage);
// Pass the screenshot interval as string (including 'manual' option)
mastermind.startCapture(this.selectedScreenshotInterval, this.selectedImageQuality);
this.responses = [];
this.currentResponseIndex = -1;
this.startTime = Date.now();
this.currentView = 'assistant';
}
async handleAPIKeyHelp() {
// Open the API-key help page in the system browser.
// NOTE(review): still points at the pre-rebrand cheatingdaddy.com domain
// after the Mastermind rename — confirm this is intentional.
if (window.require) {
const { ipcRenderer } = window.require('electron');
await ipcRenderer.invoke('open-external', 'https://cheatingdaddy.com/help/api-key');
}
}
// Customize view event handlers
// Each setter updates local state and immediately persists the choice.
async handleProfileChange(profile) {
this.selectedProfile = profile;
await mastermind.storage.updatePreference('selectedProfile', profile);
}
async handleLanguageChange(language) {
this.selectedLanguage = language;
await mastermind.storage.updatePreference('selectedLanguage', language);
}
async handleScreenshotIntervalChange(interval) {
this.selectedScreenshotInterval = interval;
await mastermind.storage.updatePreference('selectedScreenshotInterval', interval);
}
async handleImageQualityChange(quality) {
this.selectedImageQuality = quality;
await mastermind.storage.updatePreference('selectedImageQuality', quality);
}
handleBackClick() {
// Return to the main view from a sub-page.
this.currentView = 'main';
this.requestUpdate();
}
// Help view event handlers
async handleExternalLinkClick(url) {
// Open the given URL in the system browser via the main process.
if (window.require) {
const { ipcRenderer } = window.require('electron');
await ipcRenderer.invoke('open-external', url);
}
}
// Assistant view event handlers
async handleSendText(message) {
const result = await window.mastermind.sendTextMessage(message);
if (!result.success) {
console.error('Failed to send message:', result.error);
this.setStatus('Error sending message: ' + result.error);
} else {
this.setStatus('Message sent...');
this._awaitingNewResponse = true;
}
}
handleResponseIndexChanged(e) {
// User navigated to another stored response; disable the streaming
// animation so the saved text shows immediately.
this.currentResponseIndex = e.detail.index;
this.shouldAnimateResponse = false;
this.requestUpdate();
}
// Onboarding event handlers
handleOnboardingComplete() {
// Onboarding finished — drop into the main view.
this.currentView = 'main';
}
updated(changedProperties) {
super.updated(changedProperties);
// Only notify main process of view change if the view actually changed
if (changedProperties.has('currentView') && window.require) {
const { ipcRenderer } = window.require('electron');
ipcRenderer.send('view-changed', this.currentView);
// Add a small delay to smooth out the transition
const viewContainer = this.shadowRoot?.querySelector('.view-container');
if (viewContainer) {
viewContainer.classList.add('entering');
requestAnimationFrame(() => {
viewContainer.classList.remove('entering');
});
}
}
if (changedProperties.has('layoutMode')) {
this.updateLayoutMode();
}
}
renderCurrentView() {
// Only re-render the view if it hasn't been cached or if critical properties changed
const viewKey = `${this.currentView}-${this.selectedProfile}-${this.selectedLanguage}`;
switch (this.currentView) {
case 'onboarding':
return html`
<onboarding-view .onComplete=${() => this.handleOnboardingComplete()} .onClose=${() => this.handleClose()}></onboarding-view>
`;
case 'main':
return html`
<main-view
.onStart=${() => this.handleStart()}
.onAPIKeyHelp=${() => this.handleAPIKeyHelp()}
.onLayoutModeChange=${layoutMode => this.handleLayoutModeChange(layoutMode)}
></main-view>
`;
case 'customize':
return html`
<customize-view
.selectedProfile=${this.selectedProfile}
.selectedLanguage=${this.selectedLanguage}
.selectedScreenshotInterval=${this.selectedScreenshotInterval}
.selectedImageQuality=${this.selectedImageQuality}
.layoutMode=${this.layoutMode}
.onProfileChange=${profile => this.handleProfileChange(profile)}
.onLanguageChange=${language => this.handleLanguageChange(language)}
.onScreenshotIntervalChange=${interval => this.handleScreenshotIntervalChange(interval)}
.onImageQualityChange=${quality => this.handleImageQualityChange(quality)}
.onLayoutModeChange=${layoutMode => this.handleLayoutModeChange(layoutMode)}
></customize-view>
`;
case 'help':
return html` <help-view .onExternalLinkClick=${url => this.handleExternalLinkClick(url)}></help-view> `;
case 'history':
return html` <history-view></history-view> `;
case 'assistant':
return html`
<assistant-view
.responses=${this.responses}
.currentResponseIndex=${this.currentResponseIndex}
.selectedProfile=${this.selectedProfile}
.aiProvider=${this.aiProvider}
.onSendText=${message => this.handleSendText(message)}
.shouldAnimateResponse=${this.shouldAnimateResponse}
@response-index-changed=${this.handleResponseIndexChanged}
@response-animation-complete=${() => {
this.shouldAnimateResponse = false;
this._currentResponseIsComplete = true;
console.log('[response-animation-complete] Marked current response as complete');
this.requestUpdate();
}}
></assistant-view>
`;
default:
return html`<div>Unknown view: ${this.currentView}</div>`;
}
}
render() {
// Map each view to its main-content modifier class; unknown views fall
// back to the default bordered chrome.
const viewClassMap = {
assistant: 'assistant-view',
onboarding: 'onboarding-view',
customize: 'settings-view',
help: 'help-view',
history: 'history-view',
};
const mainContentClass = `main-content ${viewClassMap[this.currentView] || 'with-border'}`;
// The screen-picker dialog overlays everything while a capture source
// is being chosen (see showScreenPickerDialog()).
return html`
<div class="window-container">
<div class="container">
<app-header
.currentView=${this.currentView}
.statusText=${this.statusText}
.startTime=${this.startTime}
.aiProvider=${this.aiProvider}
.modelInfo=${this.modelInfo}
.onCustomizeClick=${() => this.handleCustomizeClick()}
.onHelpClick=${() => this.handleHelpClick()}
.onHistoryClick=${() => this.handleHistoryClick()}
.onCloseClick=${() => this.handleClose()}
.onBackClick=${() => this.handleBackClick()}
.onHideToggleClick=${() => this.handleHideToggle()}
?isClickThrough=${this._isClickThrough}
></app-header>
<div class="${mainContentClass}">
<div class="view-container">${this.renderCurrentView()}</div>
</div>
</div>
${this.showScreenPicker
? html`
<screen-picker-dialog
?visible=${this.showScreenPicker}
.sources=${this.screenSources}
@source-selected=${this.handleSourceSelected}
@cancelled=${this.handlePickerCancelled}
></screen-picker-dialog>
`
: ''}
</div>
`;
}
updateLayoutMode() {
// Apply or remove compact layout class to document root
if (this.layoutMode === 'compact') {
document.documentElement.classList.add('compact-layout');
} else {
document.documentElement.classList.remove('compact-layout');
}
}
async handleLayoutModeChange(layoutMode) {
// Persist the chosen layout, apply it to the document, and ask the main
// process to resize the window to match.
this.layoutMode = layoutMode;
await mastermind.storage.updateConfig('layout', layoutMode);
this.updateLayoutMode();
// Notify main process about layout change for window resizing
if (window.require) {
try {
const { ipcRenderer } = window.require('electron');
await ipcRenderer.invoke('update-sizes');
} catch (error) {
// Best-effort: a resize failure should not break the layout switch.
console.error('Failed to update sizes in main process:', error);
}
}
this.requestUpdate();
}
async showScreenPickerDialog() {
const { ipcRenderer } = window.require('electron');
const result = await ipcRenderer.invoke('get-screen-sources');
if (result.success) {
this.screenSources = result.sources;
this.showScreenPicker = true;
return new Promise(resolve => {
this._screenPickerResolve = resolve;
});
} else {
console.error('Failed to get screen sources:', result.error);
return { cancelled: true };
}
}
async handleSourceSelected(event) {
const { source } = event.detail;
const { ipcRenderer } = window.require('electron');
// Tell main process which source was selected
await ipcRenderer.invoke('set-selected-source', source.id);
this.showScreenPicker = false;
if (this._screenPickerResolve) {
this._screenPickerResolve({ source });
this._screenPickerResolve = null;
}
}
handlePickerCancelled() {
this.showScreenPicker = false;
if (this._screenPickerResolve) {
this._screenPickerResolve({ cancelled: true });
this._screenPickerResolve = null;
}
}
}
// Register the root application element.
customElements.define('mastermind-app', MastermindApp);

View File

@ -1,5 +1,5 @@
// Main app components // Main app components
export { MastermindApp } from './app/MastermindApp.js'; export { CheatingDaddyApp } from './app/CheatingDaddyApp.js';
export { AppHeader } from './app/AppHeader.js'; export { AppHeader } from './app/AppHeader.js';
// View components // View components

View File

@ -0,0 +1,143 @@
import { html, css, LitElement } from '../../assets/lit-core-2.7.4.min.js';
import { unifiedPageStyles } from './sharedPageStyles.js';
// AI context/customization page: lets the user pick the provider regime
// (BYOK vs local), the active conversation profile, and free-form custom
// instructions, all persisted as preferences.
// NOTE(review): this component uses the `cheatingDaddy` global while other
// components in this repo use `mastermind` — presumably a pre-rename
// alias; confirm both bridges still exist after the rebrand.
export class AICustomizeView extends LitElement {
static styles = [
unifiedPageStyles,
css`
.unified-page {
height: 100%;
}
.unified-wrap {
height: 100%;
}
section.surface {
flex: 1;
display: flex;
flex-direction: column;
}
.form-grid {
flex: 1;
display: flex;
flex-direction: column;
}
.form-group.vertical {
flex: 1;
display: flex;
flex-direction: column;
}
textarea.control {
flex: 1;
resize: none;
overflow-y: auto;
min-height: 0;
}
`,
];
// Reactive properties; underscore-prefixed entries are internal state.
static properties = {
selectedProfile: { type: String },
onProfileChange: { type: Function },
_context: { state: true },
_providerMode: { state: true },
};
constructor() {
super();
// Defaults until _loadFromStorage() hydrates the persisted values.
this.selectedProfile = 'interview';
this.onProfileChange = () => {};
this._context = '';
this._providerMode = 'byok';
this._loadFromStorage();
}
// Load the saved custom prompt and provider mode; on failure the
// constructor defaults remain in effect.
async _loadFromStorage() {
try {
const prefs = await cheatingDaddy.storage.getPreferences();
this._context = prefs.customPrompt || '';
this._providerMode = prefs.providerMode || 'byok';
this.requestUpdate();
} catch (error) {
console.error('Error loading AI customize storage:', error);
}
}
// Bubble the profile <select> change up to the owner via the callback
// property; the owner persists it.
_handleProfileChange(e) {
this.onProfileChange(e.target.value);
}
// Switch between BYOK and local provider modes and persist the choice.
async _handleProviderModeChange(e) {
this._providerMode = e.target.value;
await cheatingDaddy.storage.updatePreference('providerMode', this._providerMode);
this.requestUpdate();
}
// Persist the custom instructions.
// NOTE(review): called from @input, so this writes storage on every
// keystroke — consider debouncing.
async _saveContext(val) {
this._context = val;
await cheatingDaddy.storage.updatePreference('customPrompt', val);
}
// Human-readable label for a profile id; falls back to the raw id.
// NOTE(review): not referenced inside this class (render() builds its own
// label list) — possibly used by callers, otherwise dead code.
_getProfileName(profile) {
const names = {
interview: 'Job Interview',
sales: 'Sales Call',
meeting: 'Business Meeting',
presentation: 'Presentation',
negotiation: 'Negotiation',
exam: 'Exam Assistant',
};
return names[profile] || profile;
}
render() {
// Options for the profile <select>; labels mirror _getProfileName.
const profiles = [
{ value: 'interview', label: 'Job Interview' },
{ value: 'sales', label: 'Sales Call' },
{ value: 'meeting', label: 'Business Meeting' },
{ value: 'presentation', label: 'Presentation' },
{ value: 'negotiation', label: 'Negotiation' },
{ value: 'exam', label: 'Exam Assistant' },
];
return html`
<div class="unified-page">
<div class="unified-wrap">
<div>
<div class="page-title">AI Context</div>
</div>
<section class="surface">
<div class="form-grid">
<div class="form-group">
<label class="form-label">Regime</label>
<select class="control" .value=${this._providerMode} @change=${this._handleProviderModeChange}>
<option value="byok">BYOK (API Keys)</option>
<option value="local">Local AI (Ollama)</option>
</select>
</div>
<div class="form-group">
<label class="form-label">Profile</label>
<select class="control" .value=${this.selectedProfile} @change=${this._handleProfileChange}>
${profiles.map(profile => html`<option value=${profile.value}>${profile.label}</option>`)}
</select>
</div>
<div class="form-group vertical">
<label class="form-label">Custom Instructions</label>
<textarea
class="control"
placeholder="Resume details, role requirements, constraints..."
.value=${this._context}
@input=${e => this._saveContext(e.target.value)}
></textarea>
<div class="form-help">Sent as context at session start. Keep it short.</div>
</div>
</div>
</section>
</div>
</div>
`;
}
}
customElements.define('ai-customize-view', AICustomizeView);

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,237 @@
import { html, css, LitElement } from '../../assets/lit-core-2.7.4.min.js';
import { unifiedPageStyles } from './sharedPageStyles.js';
// Feedback page: free-form text plus optional email, POSTed to the
// project's feedback API, with optional OS/version attachment.
export class FeedbackView extends LitElement {
static styles = [
unifiedPageStyles,
css`
.feedback-form {
display: flex;
flex-direction: column;
gap: var(--space-sm);
}
.feedback-input {
width: 100%;
padding: var(--space-sm) var(--space-md);
border: 1px solid var(--border);
border-radius: var(--radius-sm);
background: var(--bg-elevated);
color: var(--text-primary);
font-size: var(--font-size-sm);
font-family: var(--font);
}
.feedback-input:focus {
outline: none;
border-color: var(--accent);
}
.feedback-input::placeholder {
color: var(--text-muted);
}
textarea.feedback-input {
min-height: 140px;
resize: vertical;
line-height: 1.45;
}
input.feedback-input {
max-width: 260px;
}
.feedback-row {
display: flex;
align-items: center;
gap: var(--space-sm);
}
.feedback-submit {
padding: var(--space-sm) var(--space-md);
border: none;
border-radius: var(--radius-sm);
background: var(--accent);
color: var(--btn-primary-text, #fff);
font-size: var(--font-size-sm);
font-weight: var(--font-weight-medium);
cursor: pointer;
transition: opacity var(--transition);
white-space: nowrap;
}
.feedback-submit:hover {
opacity: 0.85;
}
.feedback-submit:disabled {
opacity: 0.5;
cursor: not-allowed;
}
.feedback-status {
font-size: var(--font-size-xs);
color: var(--text-muted);
}
.feedback-status.success {
color: var(--success);
}
.feedback-status.error {
color: var(--danger);
}
.attach-info {
display: flex;
align-items: center;
gap: var(--space-xs);
font-size: var(--font-size-xs);
color: var(--text-muted);
cursor: pointer;
user-select: none;
}
.attach-info input[type="checkbox"] {
cursor: pointer;
accent-color: var(--accent);
}
`,
];
// All internal reactive state. _feedbackStatus is encoded as
// '<kind>:<message>' where kind is 'success' or 'error' (see render()).
static properties = {
_feedbackText: { state: true },
_feedbackEmail: { state: true },
_feedbackStatus: { state: true },
_feedbackSending: { state: true },
_attachInfo: { state: true },
_version: { state: true },
};
constructor() {
super();
this._feedbackText = '';
this._feedbackEmail = '';
this._feedbackStatus = '';
this._feedbackSending = false;
this._attachInfo = true;
this._version = '';
// Best-effort async version lookup for the attachment line.
this._loadVersion();
}
// Fetch the app version for the "sent from ..." suffix. Failures are
// non-fatal (the suffix just ends up empty) but are logged instead of
// silently swallowed so they can be diagnosed.
async _loadVersion() {
try {
this._version = await cheatingDaddy.getVersion();
this.requestUpdate();
} catch (e) {
console.warn('Failed to load app version for feedback:', e);
}
}
// Coarse OS name for the attachment line.
// NOTE(review): navigator.platform is deprecated in browsers — confirm
// its behavior in the shipped Electron version.
_getOS() {
const p = navigator.platform || '';
if (p.includes('Mac')) return 'macOS';
if (p.includes('Win')) return 'Windows';
if (p.includes('Linux')) return 'Linux';
return p;
}
// Validate and POST the feedback. Updates _feedbackStatus with a
// 'success:'/'error:' prefixed message and guards against double-send
// via _feedbackSending.
async _submitFeedback() {
const text = this._feedbackText.trim();
if (!text || this._feedbackSending) return;
let content = text;
if (this._attachInfo) {
content += `\n\nsent from ${this._getOS()} version ${this._version}`;
}
// Server-side limit; checked after the attachment suffix is added.
if (content.length > 2000) {
this._feedbackStatus = 'error:Max 2000 characters';
this.requestUpdate();
return;
}
this._feedbackSending = true;
this._feedbackStatus = '';
this.requestUpdate();
try {
const body = { feedback: content };
if (this._feedbackEmail.trim()) {
body.email = this._feedbackEmail.trim();
}
const res = await fetch('https://api.cheatingdaddy.com/api/feedback', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(body),
});
if (res.ok) {
this._feedbackText = '';
this._feedbackEmail = '';
this._feedbackStatus = 'success:Feedback sent, thank you!';
} else if (res.status === 429) {
// Rate-limited by the API.
this._feedbackStatus = 'error:Please wait a few minutes before sending again';
} else {
this._feedbackStatus = 'error:Failed to send feedback';
}
} catch (e) {
this._feedbackStatus = 'error:Could not connect to server';
}
this._feedbackSending = false;
this.requestUpdate();
}
render() {
return html`
<div class="unified-page">
<div class="unified-wrap">
<div class="page-title">Feedback</div>
<section class="surface">
<div class="feedback-form">
<textarea
class="feedback-input"
placeholder="Bug reports, feature requests, anything..."
.value=${this._feedbackText}
@input=${e => { this._feedbackText = e.target.value; }}
maxlength="2000"
></textarea>
<input
class="feedback-input"
type="email"
placeholder="Email (optional)"
.value=${this._feedbackEmail}
@input=${e => { this._feedbackEmail = e.target.value; }}
/>
<label class="attach-info">
<input
type="checkbox"
.checked=${this._attachInfo}
@change=${e => { this._attachInfo = e.target.checked; }}
/>
Attach OS and app version
</label>
<div class="feedback-row">
<button
class="feedback-submit"
@click=${() => this._submitFeedback()}
?disabled=${!this._feedbackText.trim() || this._feedbackSending}
>
${this._feedbackSending ? 'Sending...' : 'Send Feedback'}
</button>
${this._feedbackStatus ? html`
<span class="feedback-status ${this._feedbackStatus.split(':')[0]}">
${this._feedbackStatus.split(':').slice(1).join(':')}
</span>
` : ''}
</div>
</div>
</section>
</div>
</div>
`;
}
}
customElements.define('feedback-view', FeedbackView);

View File

@ -1,233 +1,95 @@
import { html, css, LitElement } from '../../assets/lit-core-2.7.4.min.js'; import { html, css, LitElement } from '../../assets/lit-core-2.7.4.min.js';
import { resizeLayout } from '../../utils/windowResize.js'; import { unifiedPageStyles } from './sharedPageStyles.js';
export class HelpView extends LitElement { export class HelpView extends LitElement {
static styles = css` static styles = [
* { unifiedPageStyles,
font-family: css`
'Inter', .shortcut-grid {
-apple-system, display: grid;
BlinkMacSystemFont, grid-template-columns: 1fr 1fr;
sans-serif; gap: var(--space-sm);
cursor: default; }
user-select: none;
}
:host { .shortcut-row {
display: block; display: flex;
padding: 0; align-items: center;
} justify-content: space-between;
gap: var(--space-sm);
padding: var(--space-sm);
border: 1px solid var(--border);
border-radius: var(--radius-sm);
background: var(--bg-elevated);
}
.help-container { .shortcut-label {
display: flex; color: var(--text-secondary);
flex-direction: column; font-size: var(--font-size-xs);
} }
.option-group { .shortcut-keys {
padding: 16px 12px; display: inline-flex;
border-bottom: 1px solid var(--border-color); gap: 4px;
} flex-wrap: wrap;
justify-content: flex-end;
}
.option-group:last-child { .key {
border-bottom: none; border: 1px solid var(--border);
} border-radius: var(--radius-sm);
padding: 2px 6px;
font-size: var(--font-size-xs);
color: var(--text-primary);
background: var(--bg-surface);
font-family: var(--font-mono);
}
.option-label { .list {
font-size: 11px; display: grid;
font-weight: 600; gap: var(--space-sm);
color: var(--text-muted); }
text-transform: uppercase;
letter-spacing: 0.5px;
margin-bottom: 12px;
}
.description { .list-item {
color: var(--text-secondary); padding: var(--space-sm);
font-size: 12px; border: 1px solid var(--border);
line-height: 1.4; border-radius: var(--radius-sm);
user-select: text; color: var(--text-secondary);
cursor: text; font-size: var(--font-size-sm);
} line-height: 1.45;
background: var(--bg-elevated);
}
.description strong { .link-row {
color: var(--text-color); display: flex;
font-weight: 500; flex-wrap: wrap;
} gap: var(--space-sm);
}
.link { .link-button {
color: var(--text-color); border: 1px solid var(--border);
text-decoration: underline; border-radius: var(--radius-sm);
text-underline-offset: 2px; padding: 8px 10px;
cursor: pointer; background: var(--bg-elevated);
} color: var(--text-primary);
font-size: var(--font-size-sm);
cursor: pointer;
transition: border-color var(--transition), color var(--transition), background var(--transition);
}
.key { .link-button:hover {
background: var(--bg-tertiary); color: var(--text-primary);
color: var(--text-color); border-color: var(--accent);
border: 1px solid var(--border-color); background: rgba(63, 125, 229, 0.14);
padding: 2px 6px; }
border-radius: 3px;
font-size: 10px;
font-family: 'SF Mono', Monaco, monospace;
font-weight: 500;
margin: 0 1px;
white-space: nowrap;
}
.keyboard-section { @media (max-width: 820px) {
display: grid; .shortcut-grid {
grid-template-columns: repeat(auto-fit, minmax(220px, 1fr)); grid-template-columns: 1fr;
gap: 12px; }
margin-top: 8px; }
}
.keyboard-group { `,
padding: 10px 0; ];
border-bottom: 1px solid var(--border-color);
}
.keyboard-group:last-child {
border-bottom: none;
}
.keyboard-group-title {
font-weight: 600;
font-size: 12px;
color: var(--text-color);
margin-bottom: 8px;
}
.shortcut-item {
display: flex;
justify-content: space-between;
align-items: center;
padding: 4px 0;
font-size: 11px;
}
.shortcut-description {
color: var(--text-secondary);
}
.shortcut-keys {
display: flex;
gap: 2px;
}
.profiles-grid {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(180px, 1fr));
gap: 8px;
margin-top: 8px;
}
.profile-item {
padding: 8px 0;
border-bottom: 1px solid var(--border-color);
}
.profile-item:last-child {
border-bottom: none;
}
.profile-name {
font-weight: 500;
font-size: 12px;
color: var(--text-color);
margin-bottom: 2px;
}
.profile-description {
font-size: 11px;
color: var(--text-muted);
line-height: 1.3;
}
.community-links {
display: flex;
gap: 8px;
flex-wrap: wrap;
}
.community-link {
display: flex;
align-items: center;
gap: 6px;
padding: 6px 10px;
background: transparent;
border: 1px solid var(--border-color);
border-radius: 3px;
color: var(--text-color);
font-size: 11px;
font-weight: 500;
transition: background 0.1s ease;
cursor: pointer;
}
.community-link:hover {
background: var(--hover-background);
}
.community-link svg {
width: 14px;
height: 14px;
flex-shrink: 0;
}
.open-logs-btn {
display: inline-flex;
align-items: center;
gap: 6px;
padding: 8px 14px;
background: var(--bg-tertiary);
border: 1px solid var(--border-color);
border-radius: 4px;
color: var(--text-color);
font-size: 12px;
font-weight: 500;
cursor: pointer;
transition: background 0.15s ease;
}
.open-logs-btn:hover {
background: var(--hover-background);
}
.usage-steps {
counter-reset: step-counter;
}
.usage-step {
counter-increment: step-counter;
position: relative;
padding-left: 24px;
margin-bottom: 8px;
font-size: 11px;
line-height: 1.4;
color: var(--text-secondary);
}
.usage-step::before {
content: counter(step-counter);
position: absolute;
left: 0;
top: 0;
width: 16px;
height: 16px;
background: var(--bg-tertiary);
color: var(--text-color);
border-radius: 3px;
display: flex;
align-items: center;
justify-content: center;
font-size: 10px;
font-weight: 600;
}
.usage-step strong {
color: var(--text-color);
}
`;
static properties = { static properties = {
onExternalLinkClick: { type: Function }, onExternalLinkClick: { type: Function },
@ -243,7 +105,7 @@ export class HelpView extends LitElement {
async _loadKeybinds() { async _loadKeybinds() {
try { try {
const keybinds = await mastermind.storage.getKeybinds(); const keybinds = await cheatingDaddy.storage.getKeybinds();
if (keybinds) { if (keybinds) {
this.keybinds = { ...this.getDefaultKeybinds(), ...keybinds }; this.keybinds = { ...this.getDefaultKeybinds(), ...keybinds };
this.requestUpdate(); this.requestUpdate();
@ -253,14 +115,8 @@ export class HelpView extends LitElement {
} }
} }
connectedCallback() {
super.connectedCallback();
// Resize window for this view
resizeLayout();
}
getDefaultKeybinds() { getDefaultKeybinds() {
const isMac = mastermind.isMacOS || navigator.platform.includes('Mac'); const isMac = cheatingDaddy.isMacOS || navigator.platform.includes('Mac');
return { return {
moveUp: isMac ? 'Alt+Up' : 'Ctrl+Up', moveUp: isMac ? 'Alt+Up' : 'Ctrl+Up',
moveDown: isMac ? 'Alt+Down' : 'Ctrl+Down', moveDown: isMac ? 'Alt+Down' : 'Ctrl+Down',
@ -276,253 +132,58 @@ export class HelpView extends LitElement {
}; };
} }
formatKeybind(keybind) { _formatKeybind(keybind) {
return keybind.split('+').map(key => html`<span class="key">${key}</span>`); return keybind.split('+').map(key => html`<span class="key">${key}</span>`);
} }
handleExternalLinkClick(url) { _open(url) {
this.onExternalLinkClick(url); this.onExternalLinkClick(url);
} }
render() { render() {
const isMacOS = mastermind.isMacOS || false; const shortcutRows = [
const isLinux = mastermind.isLinux || false; ['Move Window Up', this.keybinds.moveUp],
['Move Window Down', this.keybinds.moveDown],
['Move Window Left', this.keybinds.moveLeft],
['Move Window Right', this.keybinds.moveRight],
['Toggle Visibility', this.keybinds.toggleVisibility],
['Toggle Click-through', this.keybinds.toggleClickThrough],
['Ask Next Step', this.keybinds.nextStep],
['Previous Response', this.keybinds.previousResponse],
['Next Response', this.keybinds.nextResponse],
['Scroll Response Up', this.keybinds.scrollUp],
['Scroll Response Down', this.keybinds.scrollDown],
];
return html` return html`
<div class="help-container"> <div class="unified-page">
<div class="option-group"> <div class="unified-wrap">
<div class="option-label"> <div class="page-title">Help</div>
<span>Community & Support</span>
</div>
<div class="community-links">
<!-- <div class="community-link" @click=${() => this.handleExternalLinkClick('https://github.com/ShiftyX1/Mastermind')}>
<svg
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
>
<path
d="M14 11.9976C14 9.5059 11.683 7 8.85714 7C8.52241 7 7.41904 7.00001 7.14286 7.00001C4.30254 7.00001 2 9.23752 2 11.9976C2 14.376 3.70973 16.3664 6 16.8714C6.36756 16.9525 6.75006 16.9952 7.14286 16.9952"
></path>
<path
d="M10 11.9976C10 14.4893 12.317 16.9952 15.1429 16.9952C15.4776 16.9952 16.581 16.9952 16.8571 16.9952C19.6975 16.9952 22 14.7577 22 11.9976C22 9.6192 20.2903 7.62884 18 7.12383C17.6324 7.04278 17.2499 6.99999 16.8571 6.99999"
></path>
</svg>
Website
</div> -->
<div class="community-link" @click=${() => this.handleExternalLinkClick('https://github.com/ShiftyX1/Mastermind')}>
<svg
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
>
<path
d="M16 22.0268V19.1568C16.0375 18.68 15.9731 18.2006 15.811 17.7506C15.6489 17.3006 15.3929 16.8902 15.06 16.5468C18.2 16.1968 21.5 15.0068 21.5 9.54679C21.4997 8.15062 20.9627 6.80799 20 5.79679C20.4558 4.5753 20.4236 3.22514 19.91 2.02679C19.91 2.02679 18.73 1.67679 16 3.50679C13.708 2.88561 11.292 2.88561 8.99999 3.50679C6.26999 1.67679 5.08999 2.02679 5.08999 2.02679C4.57636 3.22514 4.54413 4.5753 4.99999 5.79679C4.03011 6.81549 3.49251 8.17026 3.49999 9.57679C3.49999 14.9968 6.79998 16.1868 9.93998 16.5768C9.61098 16.9168 9.35725 17.3222 9.19529 17.7667C9.03334 18.2112 8.96679 18.6849 8.99999 19.1568V22.0268"
></path>
<path d="M9 20.0267C6 20.9999 3.5 20.0267 2 17.0267"></path>
</svg>
GitHub
</div>
<!-- <div class="community-link" @click=${() => this.handleExternalLinkClick('https://discord.gg/GCBdubnXfJ')}>
<svg
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
>
<path d="M5.5 16C10.5 18.5 13.5 18.5 18.5 16"></path>
<path
d="M15.5 17.5L16.5 19.5C16.5 19.5 20.6713 18.1717 22 16C22 15 22.5301 7.85339 19 5.5C17.5 4.5 15 4 15 4L14 6H12"
></path>
<path
d="M8.52832 17.5L7.52832 19.5C7.52832 19.5 3.35699 18.1717 2.02832 16C2.02832 15 1.49823 7.85339 5.02832 5.5C6.52832 4.5 9.02832 4 9.02832 4L10.0283 6H12.0283"
></path>
<path
d="M8.5 14C7.67157 14 7 13.1046 7 12C7 10.8954 7.67157 10 8.5 10C9.32843 10 10 10.8954 10 12C10 13.1046 9.32843 14 8.5 14Z"
></path>
<path
d="M15.5 14C14.6716 14 14 13.1046 14 12C14 10.8954 14.6716 10 15.5 10C16.3284 10 17 10.8954 17 12C17 13.1046 16.3284 14 15.5 14Z"
></path>
</svg>
Discord
</div> -->
</div>
</div>
<div class="option-group"> <section class="surface">
<div class="option-label"> <div class="surface-title">Support</div>
<span>Keyboard Shortcuts</span> <div class="link-row">
</div> <button class="link-button" @click=${() => this._open('https://cheatingdaddy.com')}>Website</button>
<div class="keyboard-section"> <button class="link-button" @click=${() => this._open('https://github.com/sohzm/cheating-daddy')}>GitHub</button>
<div class="keyboard-group"> <button class="link-button" @click=${() => this._open('https://discord.gg/GCBdubnXfJ')}>Discord</button>
<div class="keyboard-group-title">Window Movement</div>
<div class="shortcut-item">
<span class="shortcut-description">Move window up</span>
<div class="shortcut-keys">${this.formatKeybind(this.keybinds.moveUp)}</div>
</div>
<div class="shortcut-item">
<span class="shortcut-description">Move window down</span>
<div class="shortcut-keys">${this.formatKeybind(this.keybinds.moveDown)}</div>
</div>
<div class="shortcut-item">
<span class="shortcut-description">Move window left</span>
<div class="shortcut-keys">${this.formatKeybind(this.keybinds.moveLeft)}</div>
</div>
<div class="shortcut-item">
<span class="shortcut-description">Move window right</span>
<div class="shortcut-keys">${this.formatKeybind(this.keybinds.moveRight)}</div>
</div>
</div> </div>
</section>
<div class="keyboard-group"> <section class="surface">
<div class="keyboard-group-title">Window Control</div> <div class="surface-title">Keyboard Shortcuts</div>
<div class="shortcut-item"> <div class="shortcut-grid">
<span class="shortcut-description">Toggle click-through mode</span> ${shortcutRows.map(([label, keys]) => html`
<div class="shortcut-keys">${this.formatKeybind(this.keybinds.toggleClickThrough)}</div> <div class="shortcut-row">
</div> <span class="shortcut-label">${label}</span>
<div class="shortcut-item"> <span class="shortcut-keys">${this._formatKeybind(keys)}</span>
<span class="shortcut-description">Toggle window visibility</span> </div>
<div class="shortcut-keys">${this.formatKeybind(this.keybinds.toggleVisibility)}</div> `)}
</div>
</div> </div>
</section>
<div class="keyboard-group">
<div class="keyboard-group-title">AI Actions</div>
<div class="shortcut-item">
<span class="shortcut-description">Take screenshot and ask for next step</span>
<div class="shortcut-keys">${this.formatKeybind(this.keybinds.nextStep)}</div>
</div>
</div>
<div class="keyboard-group">
<div class="keyboard-group-title">Response Navigation</div>
<div class="shortcut-item">
<span class="shortcut-description">Previous response</span>
<div class="shortcut-keys">${this.formatKeybind(this.keybinds.previousResponse)}</div>
</div>
<div class="shortcut-item">
<span class="shortcut-description">Next response</span>
<div class="shortcut-keys">${this.formatKeybind(this.keybinds.nextResponse)}</div>
</div>
<div class="shortcut-item">
<span class="shortcut-description">Scroll response up</span>
<div class="shortcut-keys">${this.formatKeybind(this.keybinds.scrollUp)}</div>
</div>
<div class="shortcut-item">
<span class="shortcut-description">Scroll response down</span>
<div class="shortcut-keys">${this.formatKeybind(this.keybinds.scrollDown)}</div>
</div>
</div>
<div class="keyboard-group">
<div class="keyboard-group-title">Text Input</div>
<div class="shortcut-item">
<span class="shortcut-description">Send message to AI</span>
<div class="shortcut-keys"><span class="key">Enter</span></div>
</div>
<div class="shortcut-item">
<span class="shortcut-description">New line in text input</span>
<div class="shortcut-keys"><span class="key">Shift</span><span class="key">Enter</span></div>
</div>
</div>
</div>
<div class="description" style="margin-top: 12px; text-align: center;">You can customize these shortcuts in Settings.</div>
</div>
<div class="option-group">
<div class="option-label">
<span>How to Use</span>
</div>
<div class="usage-steps">
<div class="usage-step"><strong>Start a Session:</strong> Enter your AI Provider API key and click "Start Session"</div>
<div class="usage-step"><strong>Customize:</strong> Choose your profile and language in the settings</div>
<div class="usage-step">
<strong>Position Window:</strong> Use keyboard shortcuts to move the window to your desired location
</div>
<div class="usage-step">
<strong>Click-through Mode:</strong> Use ${this.formatKeybind(this.keybinds.toggleClickThrough)} to make the window
click-through
</div>
<div class="usage-step"><strong>Get AI Help:</strong> The AI will analyze your screen and audio to provide assistance</div>
<div class="usage-step"><strong>Text Messages:</strong> Type questions or requests to the AI using the text input</div>
<div class="usage-step">
<strong>Navigate Responses:</strong> Use ${this.formatKeybind(this.keybinds.previousResponse)} and
${this.formatKeybind(this.keybinds.nextResponse)} to browse through AI responses
</div>
</div>
</div>
<div class="option-group">
<div class="option-label">
<span>Supported Profiles</span>
</div>
<div class="profiles-grid">
<div class="profile-item">
<div class="profile-name">Job Interview</div>
<div class="profile-description">Get help with interview questions and responses</div>
</div>
<div class="profile-item">
<div class="profile-name">Sales Call</div>
<div class="profile-description">Assistance with sales conversations and objection handling</div>
</div>
<div class="profile-item">
<div class="profile-name">Business Meeting</div>
<div class="profile-description">Support for professional meetings and discussions</div>
</div>
<div class="profile-item">
<div class="profile-name">Presentation</div>
<div class="profile-description">Help with presentations and public speaking</div>
</div>
<div class="profile-item">
<div class="profile-name">Negotiation</div>
<div class="profile-description">Guidance for business negotiations and deals</div>
</div>
<div class="profile-item">
<div class="profile-name">Exam Assistant</div>
<div class="profile-description">Academic assistance for test-taking and exam questions</div>
</div>
</div>
</div>
<div class="option-group">
<div class="option-label">
<span>Audio Input</span>
</div>
<div class="description">The AI listens to conversations and provides contextual assistance based on what it hears.</div>
</div>
<div class="option-group">
<div class="option-label">
<span>Troubleshooting</span>
</div>
<div class="description" style="margin-bottom: 12px;">
If you're experiencing issues with audio capture or other features, check the application logs for diagnostic information.
</div>
<button class="open-logs-btn" @click=${this.openLogsFolder}>📁 Open Logs Folder</button>
</div> </div>
</div> </div>
`; `;
} }
async openLogsFolder() {
try {
const { ipcRenderer } = require('electron');
const result = await ipcRenderer.invoke('open-logs-folder');
if (!result.success) {
console.error('Failed to open logs folder:', result.error);
}
} catch (err) {
console.error('Error opening logs folder:', err);
}
}
} }
customElements.define('help-view', HelpView); customElements.define('help-view', HelpView);

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -3,13 +3,7 @@ import { html, css, LitElement } from '../../assets/lit-core-2.7.4.min.js';
export class OnboardingView extends LitElement { export class OnboardingView extends LitElement {
static styles = css` static styles = css`
* { * {
font-family: font-family: var(--font);
'Inter',
-apple-system,
BlinkMacSystemFont,
'Segoe UI',
Roboto,
sans-serif;
cursor: default; cursor: default;
user-select: none; user-select: none;
margin: 0; margin: 0;
@ -27,44 +21,20 @@ export class OnboardingView extends LitElement {
overflow: hidden; overflow: hidden;
} }
.onboarding-container { .onboarding {
position: relative;
width: 100%; width: 100%;
height: 100%; height: 100%;
background: #0a0a0a; position: relative;
overflow: hidden;
}
.close-button {
position: absolute;
top: 12px;
right: 12px;
z-index: 10;
background: rgba(255, 255, 255, 0.08);
border: 1px solid rgba(255, 255, 255, 0.1);
border-radius: 6px;
width: 32px;
height: 32px;
display: flex; display: flex;
align-items: center; align-items: center;
justify-content: center; justify-content: center;
cursor: pointer; border-radius: 12px;
transition: all 0.2s ease; border: 1px solid rgba(0, 0, 0, 0.08);
color: rgba(255, 255, 255, 0.6); overflow: hidden;
background: #f0f0f0;
} }
.close-button:hover { canvas.aurora {
background: rgba(255, 255, 255, 0.12);
border-color: rgba(255, 255, 255, 0.2);
color: rgba(255, 255, 255, 0.9);
}
.close-button svg {
width: 16px;
height: 16px;
}
.gradient-canvas {
position: absolute; position: absolute;
top: 0; top: 0;
left: 0; left: 0;
@ -73,617 +43,308 @@ export class OnboardingView extends LitElement {
z-index: 0; z-index: 0;
} }
.content-wrapper { canvas.dither {
position: absolute; position: absolute;
top: 0; top: 0;
left: 0; left: 0;
right: 0; width: 100%;
bottom: 60px; height: 100%;
z-index: 1; z-index: 1;
display: flex; opacity: 0.12;
flex-direction: column; mix-blend-mode: overlay;
justify-content: center; pointer-events: none;
padding: 32px 48px; image-rendering: pixelated;
max-width: 500px;
color: #e5e5e5;
overflow: hidden;
} }
.slide-icon { .slide {
width: 48px; position: relative;
height: 48px; z-index: 2;
margin-bottom: 16px; display: flex;
opacity: 0.9; flex-direction: column;
display: block; align-items: center;
text-align: center;
max-width: 400px;
padding: var(--space-xl);
gap: var(--space-md);
} }
.slide-title { .slide-title {
font-size: 28px; font-size: 28px;
font-weight: 600; font-weight: 600;
margin-bottom: 12px; color: #111111;
color: #ffffff; line-height: 1.2;
line-height: 1.3;
} }
.slide-content { .slide-text {
font-size: 16px; font-size: 13px;
line-height: 1.5; line-height: 1.5;
margin-bottom: 24px; color: #666666;
color: #b8b8b8;
font-weight: 400;
} }
.context-textarea { .context-input {
width: 100%; width: 100%;
height: 100px; min-height: 120px;
padding: 16px; padding: 12px;
border: 1px solid rgba(255, 255, 255, 0.1); border: 1px solid rgba(0, 0, 0, 0.12);
border-radius: 8px; border-radius: 8px;
background: rgba(255, 255, 255, 0.05); background: rgba(255, 255, 255, 0.7);
color: #e5e5e5; backdrop-filter: blur(8px);
font-size: 14px; color: #111111;
font-family: inherit; font-size: 13px;
font-family: var(--font);
line-height: 1.5;
resize: vertical; resize: vertical;
transition: all 0.2s ease; text-align: left;
margin-bottom: 24px;
} }
.context-textarea::placeholder { .context-input::placeholder {
color: rgba(255, 255, 255, 0.4); color: #999999;
font-size: 14px;
} }
.context-textarea:focus { .context-input:focus {
outline: none; outline: none;
border-color: rgba(255, 255, 255, 0.2); border-color: rgba(0, 0, 0, 0.3);
background: rgba(255, 255, 255, 0.08);
} }
.feature-list { .actions {
max-width: 100%;
}
.feature-item {
display: flex; display: flex;
flex-direction: column;
align-items: center; align-items: center;
margin-bottom: 12px; gap: 8px;
font-size: 15px; margin-top: 8px;
color: #b8b8b8;
} }
.feature-icon { .btn-primary {
font-size: 16px; background: #111111;
margin-right: 12px; border: none;
opacity: 0.8;
}
.migration-buttons {
display: flex;
gap: 12px;
margin-top: 24px;
}
.migration-button {
flex: 1;
padding: 12px 24px;
border-radius: 8px;
font-size: 14px;
font-weight: 500;
cursor: pointer;
transition: all 0.2s ease;
border: 1px solid rgba(255, 255, 255, 0.1);
}
.migration-button.primary {
background: rgba(59, 130, 246, 0.8);
color: #ffffff; color: #ffffff;
border-color: rgba(59, 130, 246, 0.9); padding: 10px 32px;
} border-radius: 8px;
.migration-button.primary:hover {
background: rgba(59, 130, 246, 0.9);
border-color: rgba(59, 130, 246, 1);
}
.migration-button.secondary {
background: rgba(255, 255, 255, 0.08);
color: #e5e5e5;
border-color: rgba(255, 255, 255, 0.1);
}
.migration-button.secondary:hover {
background: rgba(255, 255, 255, 0.12);
border-color: rgba(255, 255, 255, 0.2);
}
.migration-button:active {
transform: scale(0.98);
}
.navigation {
position: absolute;
bottom: 0;
left: 0;
right: 0;
z-index: 2;
display: flex;
align-items: center;
justify-content: space-between;
padding: 16px 24px;
background: rgba(0, 0, 0, 0.3);
backdrop-filter: blur(10px);
border-top: 1px solid rgba(255, 255, 255, 0.05);
height: 60px;
box-sizing: border-box;
}
.nav-button {
background: rgba(255, 255, 255, 0.08);
border: 1px solid rgba(255, 255, 255, 0.1);
color: #e5e5e5;
padding: 8px 16px;
border-radius: 6px;
font-size: 13px; font-size: 13px;
font-weight: 500; font-weight: 500;
cursor: pointer; cursor: pointer;
transition: all 0.2s ease; transition: opacity 0.15s;
display: flex;
align-items: center;
justify-content: center;
min-width: 36px;
min-height: 36px;
} }
.nav-button:hover { .btn-primary:hover {
background: rgba(255, 255, 255, 0.12); opacity: 0.85;
border-color: rgba(255, 255, 255, 0.2);
} }
.nav-button:active { .btn-back {
transform: scale(0.98); background: none;
} border: none;
color: #888888;
.nav-button:disabled { font-size: 11px;
opacity: 0.4;
cursor: not-allowed;
}
.nav-button:disabled:hover {
background: rgba(255, 255, 255, 0.08);
border-color: rgba(255, 255, 255, 0.1);
transform: none;
}
.progress-dots {
display: flex;
gap: 12px;
align-items: center;
}
.dot {
width: 8px;
height: 8px;
border-radius: 50%;
background: rgba(255, 255, 255, 0.2);
transition: all 0.2s ease;
cursor: pointer; cursor: pointer;
padding: 4px 8px;
} }
.dot:hover { .btn-back:hover {
background: rgba(255, 255, 255, 0.4); color: #555555;
}
.dot.active {
background: rgba(255, 255, 255, 0.8);
transform: scale(1.2);
} }
`; `;
static properties = { static properties = {
currentSlide: { type: Number }, currentSlide: { type: Number },
contextText: { type: String }, contextText: { type: String },
hasOldConfig: { type: Boolean },
onComplete: { type: Function }, onComplete: { type: Function },
onClose: { type: Function },
}; };
constructor() { constructor() {
super(); super();
this.currentSlide = 0; this.currentSlide = 0;
this.contextText = ''; this.contextText = '';
this.hasOldConfig = false;
this.onComplete = () => {}; this.onComplete = () => {};
this.onClose = () => {}; this._animId = null;
this.canvas = null; this._time = 0;
this.ctx = null;
this.animationId = null;
// Transition properties
this.isTransitioning = false;
this.transitionStartTime = 0;
this.transitionDuration = 800; // 800ms fade duration
this.previousColorScheme = null;
// Subtle dark color schemes for each slide
this.colorSchemes = [
// Slide 1 - Welcome (Very dark purple/gray)
[
[25, 25, 35], // Dark gray-purple
[20, 20, 30], // Darker gray
[30, 25, 40], // Slightly purple
[15, 15, 25], // Very dark
[35, 30, 45], // Muted purple
[10, 10, 20], // Almost black
],
// Slide 2 - Privacy (Dark blue-gray)
[
[20, 25, 35], // Dark blue-gray
[15, 20, 30], // Darker blue-gray
[25, 30, 40], // Slightly blue
[10, 15, 25], // Very dark blue
[30, 35, 45], // Muted blue
[5, 10, 20], // Almost black
],
// Slide 3 - Context (Dark neutral)
[
[25, 25, 25], // Neutral dark
[20, 20, 20], // Darker neutral
[30, 30, 30], // Light dark
[15, 15, 15], // Very dark
[35, 35, 35], // Lighter dark
[10, 10, 10], // Almost black
],
// Slide 4 - Features (Dark green-gray)
[
[20, 30, 25], // Dark green-gray
[15, 25, 20], // Darker green-gray
[25, 35, 30], // Slightly green
[10, 20, 15], // Very dark green
[30, 40, 35], // Muted green
[5, 15, 10], // Almost black
],
// Slide 5 - Migration (Dark teal-gray)
[
[20, 30, 30], // Dark teal-gray
[15, 25, 25], // Darker teal-gray
[25, 35, 35], // Slightly teal
[10, 20, 20], // Very dark teal
[30, 40, 40], // Muted teal
[5, 15, 15], // Almost black
],
// Slide 6 - Complete (Dark warm gray)
[
[30, 25, 20], // Dark warm gray
[25, 20, 15], // Darker warm
[35, 30, 25], // Slightly warm
[20, 15, 10], // Very dark warm
[40, 35, 30], // Muted warm
[15, 10, 5], // Almost black
],
];
} }
async firstUpdated() { firstUpdated() {
this.canvas = this.shadowRoot.querySelector('.gradient-canvas'); this._startAurora();
this.ctx = this.canvas.getContext('2d'); this._drawDither();
this.resizeCanvas();
this.startGradientAnimation();
window.addEventListener('resize', () => this.resizeCanvas());
// Check if old config exists
if (window.mastermind && window.mastermind.storage) {
try {
this.hasOldConfig = await window.mastermind.storage.hasOldConfig();
console.log('Has old config:', this.hasOldConfig);
this.requestUpdate(); // Force re-render with new hasOldConfig value
} catch (error) {
console.error('Error checking old config:', error);
this.hasOldConfig = false;
}
}
} }
disconnectedCallback() { disconnectedCallback() {
super.disconnectedCallback(); super.disconnectedCallback();
if (this.animationId) { if (this._animId) cancelAnimationFrame(this._animId);
cancelAnimationFrame(this.animationId); }
_drawDither() {
const canvas = this.shadowRoot.querySelector('canvas.dither');
if (!canvas) return;
const blockSize = 5;
const cols = Math.ceil(canvas.offsetWidth / blockSize);
const rows = Math.ceil(canvas.offsetHeight / blockSize);
canvas.width = cols;
canvas.height = rows;
const ctx = canvas.getContext('2d');
const img = ctx.createImageData(cols, rows);
for (let i = 0; i < img.data.length; i += 4) {
const v = Math.random() > 0.5 ? 255 : 0;
img.data[i] = v;
img.data[i + 1] = v;
img.data[i + 2] = v;
img.data[i + 3] = 255;
} }
window.removeEventListener('resize', () => this.resizeCanvas()); ctx.putImageData(img, 0, 0);
} }
resizeCanvas() { _startAurora() {
if (!this.canvas) return; const canvas = this.shadowRoot.querySelector('canvas.aurora');
if (!canvas) return;
const ctx = canvas.getContext('2d');
const rect = this.getBoundingClientRect(); const scale = 0.35;
this.canvas.width = rect.width; const resize = () => {
this.canvas.height = rect.height; canvas.width = Math.floor(canvas.offsetWidth * scale);
} canvas.height = Math.floor(canvas.offsetHeight * scale);
};
resize();
startGradientAnimation() { const blobs = [
if (!this.ctx) return; { parts: [
{ ox: 0, oy: 0, r: 1.0 },
{ ox: 0.22, oy: 0.1, r: 0.85 },
{ ox: 0.11, oy: 0.05, r: 0.5 },
], color: [180, 200, 230], x: 0.15, y: 0.2, vx: 0.35, vy: 0.25, phase: 0 },
const animate = timestamp => { { parts: [
this.drawGradient(timestamp); { ox: 0, oy: 0, r: 0.95 },
this.animationId = requestAnimationFrame(animate); { ox: 0.18, oy: -0.08, r: 0.75 },
{ ox: 0.09, oy: -0.04, r: 0.4 },
], color: [190, 180, 220], x: 0.75, y: 0.2, vx: -0.3, vy: 0.35, phase: 1.2 },
{ parts: [
{ ox: 0, oy: 0, r: 0.9 },
{ ox: 0.24, oy: 0.12, r: 0.9 },
{ ox: 0.12, oy: 0.06, r: 0.35 },
], color: [210, 195, 215], x: 0.5, y: 0.65, vx: 0.25, vy: -0.3, phase: 2.4 },
{ parts: [
{ ox: 0, oy: 0, r: 0.8 },
{ ox: -0.15, oy: 0.18, r: 0.7 },
{ ox: -0.07, oy: 0.09, r: 0.45 },
], color: [175, 210, 210], x: 0.1, y: 0.75, vx: 0.4, vy: 0.2, phase: 3.6 },
{ parts: [
{ ox: 0, oy: 0, r: 0.75 },
{ ox: 0.12, oy: -0.15, r: 0.65 },
{ ox: 0.06, oy: -0.07, r: 0.35 },
], color: [220, 210, 195], x: 0.85, y: 0.55, vx: -0.28, vy: -0.32, phase: 4.8 },
{ parts: [
{ ox: 0, oy: 0, r: 0.95 },
{ ox: -0.2, oy: -0.12, r: 0.75 },
{ ox: -0.1, oy: -0.06, r: 0.4 },
], color: [170, 190, 225], x: 0.6, y: 0.1, vx: -0.2, vy: 0.38, phase: 6.0 },
{ parts: [
{ ox: 0, oy: 0, r: 0.85 },
{ ox: 0.17, oy: 0.15, r: 0.75 },
{ ox: 0.08, oy: 0.07, r: 0.35 },
], color: [200, 190, 220], x: 0.35, y: 0.4, vx: 0.32, vy: -0.22, phase: 7.2 },
{ parts: [
{ ox: 0, oy: 0, r: 0.75 },
{ ox: -0.13, oy: 0.18, r: 0.65 },
{ ox: -0.06, oy: 0.1, r: 0.4 },
], color: [215, 205, 200], x: 0.9, y: 0.85, vx: -0.35, vy: -0.25, phase: 8.4 },
{ parts: [
{ ox: 0, oy: 0, r: 0.7 },
{ ox: 0.16, oy: -0.1, r: 0.6 },
{ ox: 0.08, oy: -0.05, r: 0.35 },
], color: [185, 210, 205], x: 0.45, y: 0.9, vx: 0.22, vy: -0.4, phase: 9.6 },
];
const baseRadius = 0.32;
const draw = () => {
this._time += 0.012;
const w = canvas.width;
const h = canvas.height;
const dim = Math.min(w, h);
ctx.fillStyle = '#f0f0f0';
ctx.fillRect(0, 0, w, h);
for (const blob of blobs) {
const t = this._time;
const cx = (blob.x + Math.sin(t * blob.vx + blob.phase) * 0.22) * w;
const cy = (blob.y + Math.cos(t * blob.vy + blob.phase * 0.7) * 0.22) * h;
for (const part of blob.parts) {
const wobble = Math.sin(t * 2.5 + part.ox * 25 + blob.phase) * 0.02;
const px = cx + (part.ox + wobble) * dim;
const py = cy + (part.oy + wobble * 0.7) * dim;
const pr = part.r * baseRadius * dim;
const grad = ctx.createRadialGradient(px, py, 0, px, py, pr);
grad.addColorStop(0, `rgba(${blob.color[0]}, ${blob.color[1]}, ${blob.color[2]}, 0.55)`);
grad.addColorStop(0.4, `rgba(${blob.color[0]}, ${blob.color[1]}, ${blob.color[2]}, 0.3)`);
grad.addColorStop(0.7, `rgba(${blob.color[0]}, ${blob.color[1]}, ${blob.color[2]}, 0.1)`);
grad.addColorStop(1, `rgba(${blob.color[0]}, ${blob.color[1]}, ${blob.color[2]}, 0)`);
ctx.fillStyle = grad;
ctx.fillRect(0, 0, w, h);
}
}
this._animId = requestAnimationFrame(draw);
}; };
animate(0); draw();
}
drawGradient(timestamp) {
if (!this.ctx || !this.canvas) return;
const { width, height } = this.canvas;
let colors = this.colorSchemes[this.currentSlide];
// Handle color scheme transitions
if (this.isTransitioning && this.previousColorScheme) {
const elapsed = timestamp - this.transitionStartTime;
const progress = Math.min(elapsed / this.transitionDuration, 1);
// Use easing function for smoother transition
const easedProgress = this.easeInOutCubic(progress);
colors = this.interpolateColorSchemes(this.previousColorScheme, this.colorSchemes[this.currentSlide], easedProgress);
// End transition when complete
if (progress >= 1) {
this.isTransitioning = false;
this.previousColorScheme = null;
}
}
const time = timestamp * 0.0005; // Much slower animation
// Create moving gradient with subtle flow
const flowX = Math.sin(time * 0.7) * width * 0.3;
const flowY = Math.cos(time * 0.5) * height * 0.2;
const gradient = this.ctx.createLinearGradient(flowX, flowY, width + flowX * 0.5, height + flowY * 0.5);
// Very subtle color variations with movement
colors.forEach((color, index) => {
const offset = index / (colors.length - 1);
const wave = Math.sin(time + index * 0.3) * 0.05; // Very subtle wave
const r = Math.max(0, Math.min(255, color[0] + wave * 5));
const g = Math.max(0, Math.min(255, color[1] + wave * 5));
const b = Math.max(0, Math.min(255, color[2] + wave * 5));
gradient.addColorStop(offset, `rgb(${r}, ${g}, ${b})`);
});
// Fill with moving gradient
this.ctx.fillStyle = gradient;
this.ctx.fillRect(0, 0, width, height);
// Add a second layer with radial gradient for more depth
const centerX = width * 0.5 + Math.sin(time * 0.3) * width * 0.15;
const centerY = height * 0.5 + Math.cos(time * 0.4) * height * 0.1;
const radius = Math.max(width, height) * 0.8;
const radialGradient = this.ctx.createRadialGradient(centerX, centerY, 0, centerX, centerY, radius);
// Very subtle radial overlay
radialGradient.addColorStop(0, `rgba(${colors[0][0] + 10}, ${colors[0][1] + 10}, ${colors[0][2] + 10}, 0.1)`);
radialGradient.addColorStop(0.5, `rgba(${colors[2][0]}, ${colors[2][1]}, ${colors[2][2]}, 0.05)`);
radialGradient.addColorStop(
1,
`rgba(${colors[colors.length - 1][0]}, ${colors[colors.length - 1][1]}, ${colors[colors.length - 1][2]}, 0.03)`
);
this.ctx.globalCompositeOperation = 'overlay';
this.ctx.fillStyle = radialGradient;
this.ctx.fillRect(0, 0, width, height);
this.ctx.globalCompositeOperation = 'source-over';
}
nextSlide() {
if (this.currentSlide < 5) {
this.startColorTransition(this.currentSlide + 1);
} else {
this.completeOnboarding();
}
}
prevSlide() {
if (this.currentSlide > 0) {
this.startColorTransition(this.currentSlide - 1);
}
}
startColorTransition(newSlide) {
this.previousColorScheme = [...this.colorSchemes[this.currentSlide]];
this.currentSlide = newSlide;
this.isTransitioning = true;
this.transitionStartTime = performance.now();
}
// Interpolate between two color schemes
interpolateColorSchemes(scheme1, scheme2, progress) {
return scheme1.map((color1, index) => {
const color2 = scheme2[index];
return [
color1[0] + (color2[0] - color1[0]) * progress,
color1[1] + (color2[1] - color1[1]) * progress,
color1[2] + (color2[2] - color1[2]) * progress,
];
});
}
// Easing function for smooth transitions
easeInOutCubic(t) {
return t < 0.5 ? 4 * t * t * t : 1 - Math.pow(-2 * t + 2, 3) / 2;
} }
handleContextInput(e) { handleContextInput(e) {
this.contextText = e.target.value; this.contextText = e.target.value;
} }
async handleClose() {
if (window.require) {
const { ipcRenderer } = window.require('electron');
await ipcRenderer.invoke('quit-application');
}
}
async handleMigrate() {
const success = await window.mastermind.storage.migrateFromOldConfig();
if (success) {
console.log('Migration completed successfully');
}
this.nextSlide();
}
async handleSkipMigration() {
this.nextSlide();
}
async completeOnboarding() { async completeOnboarding() {
if (this.contextText.trim()) { if (this.contextText.trim()) {
await mastermind.storage.updatePreference('customPrompt', this.contextText.trim()); await cheatingDaddy.storage.updatePreference('customPrompt', this.contextText.trim());
} }
await mastermind.storage.updateConfig('onboarded', true); await cheatingDaddy.storage.updateConfig('onboarded', true);
this.onComplete(); this.onComplete();
} }
getSlideContent() { renderSlide() {
const slides = [ if (this.currentSlide === 0) {
{ return html`
icon: 'assets/onboarding/welcome.svg', <div class="slide">
title: 'Welcome to Mastermind', <div class="slide-title">Mastermind</div>
content: <div class="slide-text">Real-time AI that listens, watches, and helps during interviews, meetings, and exams.</div>
'Your AI assistant that listens and watches, then provides intelligent suggestions automatically during interviews, meetings, and presentations.', <div class="actions">
}, <button class="btn-primary" @click=${() => { this.currentSlide = 1; }}>Continue</button>
{ </div>
icon: 'assets/onboarding/security.svg', </div>
title: 'Completely Private', `;
content: 'Invisible to screen sharing apps and recording software. Your secret advantage stays completely hidden from others.', }
},
{
icon: 'assets/onboarding/context.svg',
title: 'Add Your Context',
content: 'Share relevant information to help the AI provide better, more personalized assistance.',
showTextarea: true,
},
{
icon: 'assets/onboarding/customize.svg',
title: 'Additional Features',
content: '',
showFeatures: true,
},
{
icon: 'assets/onboarding/context.svg',
title: 'Migrate Settings?',
content: this.hasOldConfig
? 'Mastermind is a fork of Cheating Daddy. We detected existing Cheating Daddy settings on your system. Would you like to automatically migrate your settings, API keys, and history?'
: 'Mastermind is a fork of Cheating Daddy. No previous settings were detected.',
showMigration: this.hasOldConfig,
},
{
icon: 'assets/onboarding/ready.svg',
title: 'Ready to Go',
content: 'Choose your AI Provider and start getting AI-powered assistance in real-time.',
},
];
return slides[this.currentSlide]; return html`
<div class="slide">
<div class="slide-title">Add context</div>
<div class="slide-text">Paste your resume or any info the AI should know. You can skip this and add it later.</div>
<textarea
class="context-input"
placeholder="Resume, job description, notes..."
.value=${this.contextText}
@input=${this.handleContextInput}
></textarea>
<div class="actions">
<button class="btn-primary" @click=${this.completeOnboarding}>Get Started</button>
<button class="btn-back" @click=${() => { this.currentSlide = 0; }}>Back</button>
</div>
</div>
`;
} }
render() { render() {
const slide = this.getSlideContent();
return html` return html`
<div class="onboarding-container"> <div class="onboarding">
<button class="close-button" @click=${this.handleClose} title="Close"> <canvas class="aurora"></canvas>
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 20 20" fill="currentColor"> <canvas class="dither"></canvas>
<path ${this.renderSlide()}
d="M6.28 5.22a.75.75 0 0 0-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 1 0 1.06 1.06L10 11.06l3.72 3.72a.75.75 0 1 0 1.06-1.06L11.06 10l3.72-3.72a.75.75 0 0 0-1.06-1.06L10 8.94 6.28 5.22Z"
/>
</svg>
</button>
<canvas class="gradient-canvas"></canvas>
<div class="content-wrapper">
<img class="slide-icon" src="${slide.icon}" alt="${slide.title} icon" />
<div class="slide-title">${slide.title}</div>
<div class="slide-content">${slide.content}</div>
${slide.showTextarea
? html`
<textarea
class="context-textarea"
placeholder="Paste your resume, job description, or any relevant context here..."
.value=${this.contextText}
@input=${this.handleContextInput}
></textarea>
`
: ''}
${slide.showFeatures
? html`
<div class="feature-list">
<div class="feature-item">
<span class="feature-icon">-</span>
Customize AI behavior and responses
</div>
<div class="feature-item">
<span class="feature-icon">-</span>
Review conversation history
</div>
<div class="feature-item">
<span class="feature-icon">-</span>
Adjust capture settings and intervals
</div>
</div>
`
: ''}
${slide.showMigration
? html`
<div class="migration-buttons">
<button class="migration-button primary" @click=${this.handleMigrate}>
Migrate Settings
</button>
<button class="migration-button secondary" @click=${this.handleSkipMigration}>
Start Fresh
</button>
</div>
`
: ''}
</div>
<div class="navigation">
<button class="nav-button" @click=${this.prevSlide} ?disabled=${this.currentSlide === 0}>
<svg width="16px" height="16px" stroke-width="2" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M15 6L9 12L15 18" stroke="currentColor" stroke-linecap="round" stroke-linejoin="round"></path>
</svg>
</button>
<div class="progress-dots">
${[0, 1, 2, 3, 4, 5].map(
index => html`
<div
class="dot ${index === this.currentSlide ? 'active' : ''}"
@click=${() => {
if (index !== this.currentSlide) {
this.startColorTransition(index);
}
}}
></div>
`
)}
</div>
<button class="nav-button" @click=${this.nextSlide}>
${this.currentSlide === 5
? 'Get Started'
: html`
<svg width="16px" height="16px" stroke-width="2" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M9 6L15 12L9 18" stroke="currentColor" stroke-linecap="round" stroke-linejoin="round"></path>
</svg>
`}
</button>
</div>
</div> </div>
`; `;
} }

View File

@ -1,175 +0,0 @@
import { html, css, LitElement } from '../../assets/lit-core-2.7.4.min.js';
/**
 * Modal overlay that lets the user pick a screen or window to share.
 *
 * Usage: the host app populates `sources` (an array of capture-source
 * descriptors with `id`, `name`, and `thumbnail` fields — presumably from
 * Electron's desktopCapturer; confirm against the caller) and toggles
 * `visible`. The dialog emits:
 *   - `source-selected` with `detail.source` when the user confirms, and
 *   - `cancelled` when the user dismisses it.
 * The host is responsible for hiding the dialog in response to either event.
 */
export class ScreenPickerDialog extends LitElement {
    static properties = {
        sources: { type: Array },
        // `reflect: true` is required here: visibility is controlled by the
        // `:host([visible])` CSS selector, which matches the host *attribute*.
        // Without reflection, setting the `visible` property from JS would
        // update the property but never add the attribute, so the dialog
        // would stay hidden (display: none).
        visible: { type: Boolean, reflect: true },
    };

    static styles = css`
        :host {
            display: none;
            position: fixed;
            top: 0;
            left: 0;
            right: 0;
            bottom: 0;
            background: rgba(0, 0, 0, 0.8);
            z-index: 10000;
            align-items: center;
            justify-content: center;
        }
        :host([visible]) {
            display: flex;
        }
        .dialog {
            background: var(--background-color);
            border: 1px solid var(--border-color);
            border-radius: 8px;
            padding: 24px;
            max-width: 800px;
            max-height: 80vh;
            overflow-y: auto;
        }
        h2 {
            margin: 0 0 16px 0;
            color: var(--text-color);
            font-size: 18px;
            font-weight: 500;
        }
        .sources-grid {
            display: grid;
            grid-template-columns: repeat(auto-fill, minmax(200px, 1fr));
            gap: 12px;
            margin-bottom: 16px;
        }
        .source-item {
            background: var(--input-background);
            border: 2px solid transparent;
            border-radius: 6px;
            padding: 12px;
            cursor: pointer;
            transition: all 0.2s ease;
        }
        .source-item:hover {
            border-color: var(--border-default);
            background: var(--button-hover);
        }
        .source-item.selected {
            border-color: var(--accent-color);
            background: var(--button-hover);
        }
        .source-thumbnail {
            width: 100%;
            height: 120px;
            object-fit: contain;
            background: #1a1a1a;
            border-radius: 4px;
            margin-bottom: 8px;
        }
        .source-name {
            color: var(--text-color);
            font-size: 13px;
            text-align: center;
            word-break: break-word;
        }
        .buttons {
            display: flex;
            gap: 8px;
            justify-content: flex-end;
        }
        button {
            background: var(--button-background);
            color: var(--text-color);
            border: 1px solid var(--border-color);
            padding: 8px 16px;
            border-radius: 3px;
            cursor: pointer;
            font-size: 13px;
            transition: background-color 0.1s ease;
        }
        button:hover {
            background: var(--button-hover);
        }
        button.primary {
            background: var(--accent-color);
            color: white;
            border-color: var(--accent-color);
        }
        button.primary:hover {
            background: var(--accent-hover);
        }
        button:disabled {
            opacity: 0.5;
            cursor: not-allowed;
        }
    `;

    constructor() {
        super();
        this.sources = [];
        this.visible = false;
        // Currently highlighted source. Intentionally not a reactive
        // property (it is internal UI state); selectSource() calls
        // requestUpdate() manually so the highlight re-renders.
        this.selectedSource = null;
    }

    /**
     * Highlight a source in the grid and re-render to show the selection.
     * @param {object} source - one entry from `this.sources`
     */
    selectSource(source) {
        this.selectedSource = source;
        this.requestUpdate();
    }

    /**
     * Emit `source-selected` for the highlighted source.
     * No-op when nothing is selected (the Share button is also disabled
     * in that state, so this is a defensive guard).
     */
    confirm() {
        if (this.selectedSource) {
            this.dispatchEvent(
                new CustomEvent('source-selected', {
                    detail: { source: this.selectedSource },
                })
            );
        }
    }

    /** Emit `cancelled`; the host decides how to close the dialog. */
    cancel() {
        this.dispatchEvent(new CustomEvent('cancelled'));
    }

    render() {
        return html`
            <div class="dialog">
                <h2>Choose screen or window to share</h2>
                <div class="sources-grid">
                    ${this.sources.map(
                        source => html`
                            <div
                                class="source-item ${this.selectedSource?.id === source.id ? 'selected' : ''}"
                                @click=${() => this.selectSource(source)}
                            >
                                <img class="source-thumbnail" src="${source.thumbnail}" alt="${source.name}" />
                                <div class="source-name">${source.name}</div>
                            </div>
                        `
                    )}
                </div>
                <div class="buttons">
                    <button @click=${this.cancel}>Cancel</button>
                    <button class="primary" @click=${this.confirm} ?disabled=${!this.selectedSource}>Share</button>
                </div>
            </div>
        `;
    }
}
customElements.define('screen-picker-dialog', ScreenPickerDialog);

View File

@ -0,0 +1,172 @@
import { css } from '../../assets/lit-core-2.7.4.min.js';
/**
 * Shared stylesheet for "unified" full-height settings/config pages.
 *
 * Imported into a Lit component's `static styles` (alone or in an array).
 * Provides the page scaffold (`.unified-page` / `.unified-wrap`), titled
 * card surfaces (`.surface`), a vertical form layout (`.form-grid`,
 * `.form-row`, `.form-group`), standard inputs (`.control`, with select
 * and textarea variants), and small inline labels (`.chip`, `.pill`).
 *
 * All colors, spacing, radii, and fonts come from the app-level CSS custom
 * properties (--bg-*, --text-*, --space-*, --radius-*, --font*), so the
 * sheet adapts to whatever theme the host document defines.
 */
export const unifiedPageStyles = css`
    /* Reset: everything in the component uses the app font and is
       non-selectable (desktop-app feel). */
    * {
        box-sizing: border-box;
        font-family: var(--font);
        cursor: default;
        user-select: none;
    }
    :host {
        display: block;
        height: 100%;
    }
    /* Page scaffold: full-height scrollable page with a centered,
       width-capped column of stacked sections. */
    .unified-page {
        height: 100%;
        overflow-y: auto;
        padding: var(--space-lg);
        background: var(--bg-app);
    }
    .unified-wrap {
        width: 100%;
        max-width: 1160px;
        margin: 0 auto;
        display: flex;
        flex-direction: column;
        gap: var(--space-md);
        min-height: 100%;
    }
    .page-title {
        font-size: var(--font-size-xl);
        font-weight: var(--font-weight-semibold);
        color: var(--text-primary);
        margin-bottom: 4px;
    }
    .page-subtitle {
        color: var(--text-muted);
        font-size: var(--font-size-sm);
    }
    /* Card-like section container with optional title/subtitle. */
    .surface {
        border: 1px solid var(--border);
        border-radius: var(--radius-md);
        background: var(--bg-surface);
        padding: var(--space-md);
    }
    .surface-title {
        color: var(--text-primary);
        font-size: var(--font-size-md);
        font-weight: var(--font-weight-semibold);
        margin-bottom: 4px;
    }
    .surface-subtitle {
        color: var(--text-muted);
        font-size: var(--font-size-xs);
        margin-bottom: var(--space-md);
    }
    /* Form layout: label left / control right by default;
       add `.vertical` on a group to stack label above control. */
    .form-grid {
        display: flex;
        flex-direction: column;
        gap: var(--space-sm);
    }
    .form-row {
        display: flex;
        flex-direction: column;
        gap: var(--space-sm);
    }
    .form-group {
        display: flex;
        align-items: center;
        justify-content: space-between;
        gap: var(--space-md);
    }
    .form-group.vertical {
        flex-direction: column;
        align-items: stretch;
    }
    .form-label {
        color: var(--text-secondary);
        font-size: var(--font-size-sm);
        white-space: nowrap;
        flex-shrink: 0;
    }
    .form-help {
        color: var(--text-muted);
        font-size: var(--font-size-xs);
        line-height: 1.4;
    }
    /* Standard input control; accent ring on focus. */
    .control {
        width: 200px;
        background: var(--bg-elevated);
        color: var(--text-primary);
        border: 1px solid var(--border);
        border-radius: var(--radius-sm);
        padding: 8px 12px;
        font-size: var(--font-size-sm);
        transition: border-color var(--transition), box-shadow var(--transition);
    }
    .control:hover:not(:focus) {
        border-color: var(--border-strong);
    }
    .control:focus {
        outline: none;
        border-color: var(--accent);
        box-shadow: 0 0 0 1px var(--accent);
    }
    /* Native select with a custom inline-SVG chevron (native arrow
       suppressed via appearance: none). */
    select.control {
        appearance: none;
        background-image: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' fill='none' viewBox='0 0 20 20'%3e%3cpath stroke='%236b6b6b' stroke-linecap='round' stroke-linejoin='round' stroke-width='1.5' d='M6 8l4 4 4-4'/%3e%3c/svg%3e");
        background-position: right 8px center;
        background-repeat: no-repeat;
        background-size: 12px;
        padding-right: 28px;
        cursor: pointer;
    }
    textarea.control {
        width: 100%;
        min-height: 100px;
        resize: vertical;
        line-height: 1.45;
    }
    /* Small inline labels: `.chip` = monospace value tag,
       `.pill` = rounded outline badge. */
    .chip {
        display: inline-flex;
        align-items: center;
        border-radius: var(--radius-sm);
        background: var(--bg-elevated);
        color: var(--text-secondary);
        padding: 2px 8px;
        font-size: var(--font-size-xs);
        font-family: var(--font-mono);
    }
    .pill {
        border: 1px solid var(--border);
        border-radius: 999px;
        padding: 2px 8px;
        font-size: var(--font-size-xs);
        color: var(--text-muted);
    }
    .muted {
        color: var(--text-muted);
    }
    .danger {
        color: var(--danger);
    }
    /* Tighten page padding on narrow windows. */
    @media (max-width: 640px) {
        .unified-page {
            padding: var(--space-md);
        }
    }
`;

View File

@ -5,75 +5,112 @@
<title>Screen and Audio Capture</title> <title>Screen and Audio Capture</title>
<style> <style>
:root { :root {
/* Backgrounds - with default 0.8 transparency */ /* Backgrounds */
--background-transparent: transparent; --bg-app: #0A0A0A;
--bg-primary: rgba(30, 30, 30, 0.8); --bg-surface: #111111;
--bg-secondary: rgba(37, 37, 38, 0.8); --bg-elevated: #191919;
--bg-tertiary: rgba(45, 45, 45, 0.8); --bg-hover: #1F1F1F;
--bg-hover: rgba(50, 50, 50, 0.8);
/* Text */ /* Text */
--text-color: #e5e5e5; --text-primary: #F5F5F5;
--text-secondary: #a0a0a0; --text-secondary: #999999;
--text-muted: #6b6b6b; --text-muted: #555555;
--description-color: #a0a0a0;
--placeholder-color: #6b6b6b;
/* Borders */ /* Borders & Lines */
--border-color: #3c3c3c; --border: #222222;
--border-subtle: #3c3c3c; --border-strong: #333333;
--border-default: #4a4a4a;
/* Component backgrounds - with default 0.8 transparency */ /* Accent */
--header-background: rgba(30, 30, 30, 0.8); --accent: #3B82F6;
--header-actions-color: #a0a0a0; --accent-hover: #2563EB;
--main-content-background: rgba(30, 30, 30, 0.8);
/* Status */
--success: #22C55E;
--warning: #D4A017;
--danger: #EF4444;
/* Typography */
--font: 'Inter', -apple-system, BlinkMacSystemFont, system-ui, sans-serif;
--font-mono: 'SF Mono', 'Menlo', 'Monaco', 'Consolas', monospace;
--font-size-xs: 11px;
--font-size-sm: 13px;
--font-size-base: 14px;
--font-size-lg: 16px;
--font-size-xl: 20px;
--font-size-2xl: 28px;
--font-weight-normal: 400;
--font-weight-medium: 500;
--font-weight-semibold: 600;
--line-height: 1.6;
/* Spacing */
--space-xs: 4px;
--space-sm: 8px;
--space-md: 16px;
--space-lg: 24px;
--space-xl: 40px;
--space-2xl: 64px;
/* Radius */
--radius-sm: 4px;
--radius-md: 8px;
--radius-lg: 12px;
/* Transitions */
--transition: 150ms ease;
/* Sidebar */
--sidebar-width: 220px;
--sidebar-width-collapsed: 60px;
/* Legacy compatibility — mapped to new tokens */
--background-transparent: transparent;
--bg-primary: var(--bg-app);
--bg-secondary: var(--bg-surface);
--bg-tertiary: var(--bg-elevated);
--text-color: var(--text-primary);
--description-color: var(--text-secondary);
--placeholder-color: var(--text-muted);
--border-color: var(--border);
--border-subtle: var(--border);
--border-default: var(--border-strong);
--header-background: var(--bg-surface);
--header-actions-color: var(--text-secondary);
--main-content-background: var(--bg-app);
--button-background: transparent; --button-background: transparent;
--button-border: #3c3c3c; --button-border: var(--border-strong);
--icon-button-color: #a0a0a0; --icon-button-color: var(--text-secondary);
--hover-background: rgba(50, 50, 50, 0.8); --hover-background: var(--bg-hover);
--input-background: rgba(45, 45, 45, 0.8); --input-background: var(--bg-elevated);
--input-focus-background: rgba(45, 45, 45, 0.8); --input-focus-background: var(--bg-elevated);
--focus-border-color: var(--accent);
/* Focus states - neutral */
--focus-border-color: #4a4a4a;
--focus-box-shadow: transparent; --focus-box-shadow: transparent;
--scrollbar-track: var(--bg-app);
--scrollbar-thumb: var(--border-strong);
--scrollbar-thumb-hover: #444444;
--scrollbar-background: var(--bg-app);
--start-button-background: var(--accent);
--start-button-color: #ffffff;
--start-button-border: var(--accent);
--start-button-hover-background: var(--accent-hover);
--start-button-hover-border: var(--accent-hover);
--text-input-button-background: var(--accent);
--text-input-button-hover: var(--accent-hover);
--link-color: var(--accent);
--key-background: var(--bg-elevated);
--success-color: var(--success);
--warning-color: var(--warning);
--error-color: var(--danger);
--danger-color: var(--danger);
--preview-video-background: var(--bg-surface);
--preview-video-border: var(--border);
--option-label-color: var(--text-primary);
--screen-option-background: var(--bg-surface);
--screen-option-hover-background: var(--bg-elevated);
--screen-option-selected-background: var(--bg-hover);
--screen-option-text: var(--text-secondary);
/* Scrollbar */ /* Layout-specific */
--scrollbar-track: #1e1e1e;
--scrollbar-thumb: #3c3c3c;
--scrollbar-thumb-hover: #4a4a4a;
--scrollbar-background: #1e1e1e;
/* Legacy/misc */
--preview-video-background: #1e1e1e;
--preview-video-border: #3c3c3c;
--option-label-color: #e5e5e5;
--screen-option-background: #252526;
--screen-option-hover-background: #2d2d2d;
--screen-option-selected-background: #323232;
--screen-option-text: #a0a0a0;
/* Buttons */
--start-button-background: #ffffff;
--start-button-color: #1e1e1e;
--start-button-border: #ffffff;
--start-button-hover-background: #e0e0e0;
--start-button-hover-border: #e0e0e0;
--text-input-button-background: #ffffff;
--text-input-button-hover: #e0e0e0;
/* Links - neutral */
--link-color: #e5e5e5;
--key-background: #2d2d2d;
/* Status colors */
--success-color: #4ec9b0;
--warning-color: #dcdcaa;
--error-color: #f14c4c;
--danger-color: #f14c4c;
/* Layout-specific variables */
--header-padding: 8px 16px; --header-padding: 8px 16px;
--header-font-size: 14px; --header-font-size: 14px;
--header-gap: 8px; --header-gap: 8px;
@ -81,49 +118,66 @@
--header-icon-padding: 6px; --header-icon-padding: 6px;
--header-font-size-small: 12px; --header-font-size-small: 12px;
--main-content-padding: 16px; --main-content-padding: 16px;
--main-content-margin-top: 1px; --main-content-margin-top: 0;
--icon-size: 18px; --icon-size: 18px;
--border-radius: 3px; --border-radius: var(--radius-sm);
--content-border-radius: 0; --content-border-radius: 0;
} }
/* Compact layout styles */ html {
:root.compact-layout { margin: 0;
--header-padding: 6px 12px; padding: 0;
--header-font-size: 12px; height: 100%;
--header-gap: 6px; overflow: hidden;
--header-button-padding: 4px 8px; border-radius: 12px;
--header-icon-padding: 4px; background: transparent;
--header-font-size-small: 10px;
--main-content-padding: 12px;
--main-content-margin-top: 1px;
--icon-size: 16px;
--border-radius: 3px;
--content-border-radius: 0;
} }
html,
body { body {
margin: 0; margin: 0;
padding: 0; padding: 0;
height: 100%; height: 100%;
overflow: hidden; overflow: hidden;
background: transparent; background: var(--bg-app);
} color: var(--text-primary);
line-height: var(--line-height);
body { border-radius: 12px;
font-family: border: 1px solid var(--border);
'Inter', font-family: var(--font);
-apple-system, font-size: var(--font-size-base);
BlinkMacSystemFont, font-weight: var(--font-weight-normal);
sans-serif; -webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
} }
* { * {
box-sizing: border-box; box-sizing: border-box;
} }
mastermind-app { :focus-visible {
outline: 2px solid var(--accent);
outline-offset: 2px;
}
::-webkit-scrollbar {
width: 6px;
height: 6px;
}
::-webkit-scrollbar-track {
background: transparent;
}
::-webkit-scrollbar-thumb {
background: var(--border-strong);
border-radius: 3px;
}
::-webkit-scrollbar-thumb:hover {
background: #444444;
}
cheating-daddy-app {
display: block; display: block;
width: 100%; width: 100%;
height: 100%; height: 100%;
@ -134,9 +188,10 @@
<script src="assets/marked-4.3.0.min.js"></script> <script src="assets/marked-4.3.0.min.js"></script>
<script src="assets/highlight-11.9.0.min.js"></script> <script src="assets/highlight-11.9.0.min.js"></script>
<link rel="stylesheet" href="assets/highlight-vscode-dark.min.css" /> <link rel="stylesheet" href="assets/highlight-vscode-dark.min.css" />
<script type="module" src="components/app/MastermindApp.js"></script> <script type="module" src="components/app/CheatingDaddyApp.js"></script>
<mastermind-app id="mastermind"></mastermind-app> <cheating-daddy-app id="cheatingDaddy"></cheating-daddy-app>
<script src="script.js"></script>
<script src="utils/renderer.js"></script> <script src="utils/renderer.js"></script>
</body> </body>
</html> </html>

View File

@ -1,370 +1,336 @@
if (require('electron-squirrel-startup')) { if (require("electron-squirrel-startup")) {
process.exit(0); process.exit(0);
} }
const { app, BrowserWindow, shell, ipcMain } = require('electron'); // ── Global crash handlers to prevent silent process termination ──
const { createWindow, updateGlobalShortcuts } = require('./utils/window'); process.on("uncaughtException", (error) => {
const { setupAIProviderIpcHandlers } = require('./utils/ai-provider-manager'); console.error("[FATAL] Uncaught exception:", error);
const { stopMacOSAudioCapture } = require('./utils/gemini'); try {
const { initLogger, closeLogger, getLogPath } = require('./utils/logger'); const { sendToRenderer } = require("./utils/gemini");
const storage = require('./storage'); sendToRenderer(
"update-status",
"Fatal error: " + (error?.message || "unknown"),
);
} catch (_) {
// sendToRenderer may not be available yet
}
});
process.on("unhandledRejection", (reason) => {
console.error("[FATAL] Unhandled promise rejection:", reason);
try {
const { sendToRenderer } = require("./utils/gemini");
sendToRenderer(
"update-status",
"Unhandled error: " +
(reason instanceof Error ? reason.message : String(reason)),
);
} catch (_) {
// sendToRenderer may not be available yet
}
});
const { app, BrowserWindow, shell, ipcMain } = require("electron");
const { createWindow, updateGlobalShortcuts } = require("./utils/window");
const {
setupGeminiIpcHandlers,
stopMacOSAudioCapture,
sendToRenderer,
} = require("./utils/gemini");
const storage = require("./storage");
const geminiSessionRef = { current: null }; const geminiSessionRef = { current: null };
let mainWindow = null; let mainWindow = null;
function sendToRenderer(channel, data) {
const windows = BrowserWindow.getAllWindows();
if (windows.length > 0) {
windows[0].webContents.send(channel, data);
}
}
function createMainWindow() { function createMainWindow() {
mainWindow = createWindow(sendToRenderer, geminiSessionRef); mainWindow = createWindow(sendToRenderer, geminiSessionRef);
return mainWindow; return mainWindow;
} }
app.whenReady().then(async () => { app.whenReady().then(async () => {
// Initialize file logger first // Initialize storage (checks version, resets if needed)
const logPath = initLogger(); storage.initializeStorage();
console.log('App starting, log file:', logPath);
// Initialize storage (checks version, resets if needed) // Trigger screen recording permission prompt on macOS if not already granted
storage.initializeStorage(); if (process.platform === "darwin") {
const { desktopCapturer } = require("electron");
desktopCapturer.getSources({ types: ["screen"] }).catch(() => {});
}
createMainWindow();
setupGeminiIpcHandlers(geminiSessionRef);
setupStorageIpcHandlers();
setupGeneralIpcHandlers();
});
app.on("window-all-closed", () => {
stopMacOSAudioCapture();
if (process.platform !== "darwin") {
app.quit();
}
});
app.on("before-quit", () => {
stopMacOSAudioCapture();
});
app.on("activate", () => {
if (BrowserWindow.getAllWindows().length === 0) {
createMainWindow(); createMainWindow();
setupAIProviderIpcHandlers(geminiSessionRef); }
setupStorageIpcHandlers();
setupGeneralIpcHandlers();
// Add handler to get log path from renderer
ipcMain.handle('get-log-path', () => getLogPath());
// Add handler for renderer logs (so they go to the log file)
ipcMain.on('renderer-log', (event, { level, message }) => {
const prefix = '[RENDERER]';
if (level === 'error') console.error(prefix, message);
else if (level === 'warn') console.warn(prefix, message);
else console.log(prefix, message);
});
});
app.on('window-all-closed', () => {
stopMacOSAudioCapture();
closeLogger();
if (process.platform !== 'darwin') {
app.quit();
}
});
app.on('before-quit', () => {
stopMacOSAudioCapture();
closeLogger();
});
app.on('activate', () => {
if (BrowserWindow.getAllWindows().length === 0) {
createMainWindow();
}
}); });
function setupStorageIpcHandlers() { function setupStorageIpcHandlers() {
// ============ CONFIG ============ // ============ CONFIG ============
ipcMain.handle('storage:get-config', async () => { ipcMain.handle("storage:get-config", async () => {
try { try {
return { success: true, data: storage.getConfig() }; return { success: true, data: storage.getConfig() };
} catch (error) { } catch (error) {
console.error('Error getting config:', error); console.error("Error getting config:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
ipcMain.handle('storage:set-config', async (event, config) => { ipcMain.handle("storage:set-config", async (event, config) => {
try { try {
storage.setConfig(config); storage.setConfig(config);
return { success: true }; return { success: true };
} catch (error) { } catch (error) {
console.error('Error setting config:', error); console.error("Error setting config:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
ipcMain.handle('storage:update-config', async (event, key, value) => { ipcMain.handle("storage:update-config", async (event, key, value) => {
try { try {
storage.updateConfig(key, value); storage.updateConfig(key, value);
return { success: true }; return { success: true };
} catch (error) { } catch (error) {
console.error('Error updating config:', error); console.error("Error updating config:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
// ============ CREDENTIALS ============ // ============ CREDENTIALS ============
ipcMain.handle('storage:get-credentials', async () => { ipcMain.handle("storage:get-credentials", async () => {
try { try {
return { success: true, data: storage.getCredentials() }; return { success: true, data: storage.getCredentials() };
} catch (error) { } catch (error) {
console.error('Error getting credentials:', error); console.error("Error getting credentials:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
ipcMain.handle('storage:set-credentials', async (event, credentials) => { ipcMain.handle("storage:set-credentials", async (event, credentials) => {
try { try {
storage.setCredentials(credentials); storage.setCredentials(credentials);
return { success: true }; return { success: true };
} catch (error) { } catch (error) {
console.error('Error setting credentials:', error); console.error("Error setting credentials:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
ipcMain.handle('storage:get-api-key', async () => { ipcMain.handle("storage:get-api-key", async () => {
try { try {
return { success: true, data: storage.getApiKey() }; return { success: true, data: storage.getApiKey() };
} catch (error) { } catch (error) {
console.error('Error getting API key:', error); console.error("Error getting API key:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
ipcMain.handle('storage:set-api-key', async (event, apiKey) => { ipcMain.handle("storage:set-api-key", async (event, apiKey) => {
try { try {
storage.setApiKey(apiKey); storage.setApiKey(apiKey);
return { success: true }; return { success: true };
} catch (error) { } catch (error) {
console.error('Error setting API key:', error); console.error("Error setting API key:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
ipcMain.handle('storage:get-openai-credentials', async () => { ipcMain.handle("storage:get-groq-api-key", async () => {
try { try {
return { success: true, data: storage.getOpenAICredentials() }; return { success: true, data: storage.getGroqApiKey() };
} catch (error) { } catch (error) {
console.error('Error getting OpenAI credentials:', error); console.error("Error getting Groq API key:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
ipcMain.handle('storage:set-openai-credentials', async (event, config) => { ipcMain.handle("storage:set-groq-api-key", async (event, groqApiKey) => {
try { try {
storage.setOpenAICredentials(config); storage.setGroqApiKey(groqApiKey);
return { success: true }; return { success: true };
} catch (error) { } catch (error) {
console.error('Error setting OpenAI credentials:', error); console.error("Error setting Groq API key:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
ipcMain.handle('storage:get-openai-sdk-credentials', async () => { // ============ PREFERENCES ============
try { ipcMain.handle("storage:get-preferences", async () => {
return { success: true, data: storage.getOpenAISDKCredentials() }; try {
} catch (error) { return { success: true, data: storage.getPreferences() };
console.error('Error getting OpenAI SDK credentials:', error); } catch (error) {
return { success: false, error: error.message }; console.error("Error getting preferences:", error);
} return { success: false, error: error.message };
}); }
});
ipcMain.handle('storage:set-openai-sdk-credentials', async (event, config) => { ipcMain.handle("storage:set-preferences", async (event, preferences) => {
try { try {
storage.setOpenAISDKCredentials(config); storage.setPreferences(preferences);
return { success: true }; return { success: true };
} catch (error) { } catch (error) {
console.error('Error setting OpenAI SDK credentials:', error); console.error("Error setting preferences:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
// ============ PREFERENCES ============ ipcMain.handle("storage:update-preference", async (event, key, value) => {
ipcMain.handle('storage:get-preferences', async () => { try {
try { storage.updatePreference(key, value);
return { success: true, data: storage.getPreferences() }; return { success: true };
} catch (error) { } catch (error) {
console.error('Error getting preferences:', error); console.error("Error updating preference:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
ipcMain.handle('storage:set-preferences', async (event, preferences) => { // ============ KEYBINDS ============
try { ipcMain.handle("storage:get-keybinds", async () => {
storage.setPreferences(preferences); try {
return { success: true }; return { success: true, data: storage.getKeybinds() };
} catch (error) { } catch (error) {
console.error('Error setting preferences:', error); console.error("Error getting keybinds:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
ipcMain.handle('storage:update-preference', async (event, key, value) => { ipcMain.handle("storage:set-keybinds", async (event, keybinds) => {
try { try {
storage.updatePreference(key, value); storage.setKeybinds(keybinds);
return { success: true }; return { success: true };
} catch (error) { } catch (error) {
console.error('Error updating preference:', error); console.error("Error setting keybinds:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
// ============ KEYBINDS ============ // ============ HISTORY ============
ipcMain.handle('storage:get-keybinds', async () => { ipcMain.handle("storage:get-all-sessions", async () => {
try { try {
return { success: true, data: storage.getKeybinds() }; return { success: true, data: storage.getAllSessions() };
} catch (error) { } catch (error) {
console.error('Error getting keybinds:', error); console.error("Error getting sessions:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
ipcMain.handle('storage:set-keybinds', async (event, keybinds) => { ipcMain.handle("storage:get-session", async (event, sessionId) => {
try { try {
storage.setKeybinds(keybinds); return { success: true, data: storage.getSession(sessionId) };
return { success: true }; } catch (error) {
} catch (error) { console.error("Error getting session:", error);
console.error('Error setting keybinds:', error); return { success: false, error: error.message };
return { success: false, error: error.message }; }
} });
});
// ============ HISTORY ============ ipcMain.handle("storage:save-session", async (event, sessionId, data) => {
ipcMain.handle('storage:get-all-sessions', async () => { try {
try { storage.saveSession(sessionId, data);
return { success: true, data: storage.getAllSessions() }; return { success: true };
} catch (error) { } catch (error) {
console.error('Error getting sessions:', error); console.error("Error saving session:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
ipcMain.handle('storage:get-session', async (event, sessionId) => { ipcMain.handle("storage:delete-session", async (event, sessionId) => {
try { try {
return { success: true, data: storage.getSession(sessionId) }; storage.deleteSession(sessionId);
} catch (error) { return { success: true };
console.error('Error getting session:', error); } catch (error) {
return { success: false, error: error.message }; console.error("Error deleting session:", error);
} return { success: false, error: error.message };
}); }
});
ipcMain.handle('storage:save-session', async (event, sessionId, data) => { ipcMain.handle("storage:delete-all-sessions", async () => {
try { try {
storage.saveSession(sessionId, data); storage.deleteAllSessions();
return { success: true }; return { success: true };
} catch (error) { } catch (error) {
console.error('Error saving session:', error); console.error("Error deleting all sessions:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
ipcMain.handle('storage:delete-session', async (event, sessionId) => { // ============ LIMITS ============
try { ipcMain.handle("storage:get-today-limits", async () => {
storage.deleteSession(sessionId); try {
return { success: true }; return { success: true, data: storage.getTodayLimits() };
} catch (error) { } catch (error) {
console.error('Error deleting session:', error); console.error("Error getting today limits:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
ipcMain.handle('storage:delete-all-sessions', async () => { // ============ CLEAR ALL ============
try { ipcMain.handle("storage:clear-all", async () => {
storage.deleteAllSessions(); try {
return { success: true }; storage.clearAllData();
} catch (error) { return { success: true };
console.error('Error deleting all sessions:', error); } catch (error) {
return { success: false, error: error.message }; console.error("Error clearing all data:", error);
} return { success: false, error: error.message };
}); }
});
// ============ LIMITS ============
ipcMain.handle('storage:get-today-limits', async () => {
try {
return { success: true, data: storage.getTodayLimits() };
} catch (error) {
console.error('Error getting today limits:', error);
return { success: false, error: error.message };
}
});
// ============ CLEAR ALL ============
ipcMain.handle('storage:clear-all', async () => {
try {
storage.clearAllData();
return { success: true };
} catch (error) {
console.error('Error clearing all data:', error);
return { success: false, error: error.message };
}
});
// ============ MIGRATION ============
ipcMain.handle('storage:has-old-config', async () => {
try {
return { success: true, data: storage.hasOldConfig() };
} catch (error) {
console.error('Error checking old config:', error);
return { success: false, error: error.message };
}
});
ipcMain.handle('storage:migrate-from-old-config', async () => {
try {
const success = storage.migrateFromOldConfig();
return { success: true, data: success };
} catch (error) {
console.error('Error migrating from old config:', error);
return { success: false, error: error.message };
}
});
} }
function setupGeneralIpcHandlers() { function setupGeneralIpcHandlers() {
ipcMain.handle('get-app-version', async () => { ipcMain.handle("get-app-version", async () => {
return app.getVersion(); return app.getVersion();
}); });
ipcMain.handle('open-logs-folder', async () => { ipcMain.handle("quit-application", async (event) => {
try { try {
const logPath = getLogPath(); stopMacOSAudioCapture();
const logsDir = require('path').dirname(logPath); app.quit();
await shell.openPath(logsDir); return { success: true };
return { success: true, path: logsDir }; } catch (error) {
} catch (error) { console.error("Error quitting application:", error);
console.error('Error opening logs folder:', error); return { success: false, error: error.message };
return { success: false, error: error.message }; }
} });
});
ipcMain.handle('quit-application', async event => { ipcMain.handle("open-external", async (event, url) => {
try { try {
stopMacOSAudioCapture(); await shell.openExternal(url);
app.quit(); return { success: true };
return { success: true }; } catch (error) {
} catch (error) { console.error("Error opening external URL:", error);
console.error('Error quitting application:', error); return { success: false, error: error.message };
return { success: false, error: error.message }; }
} });
});
ipcMain.handle('open-external', async (event, url) => { ipcMain.on("update-keybinds", (event, newKeybinds) => {
try { if (mainWindow) {
await shell.openExternal(url); // Also save to storage
return { success: true }; storage.setKeybinds(newKeybinds);
} catch (error) { updateGlobalShortcuts(
console.error('Error opening external URL:', error); newKeybinds,
return { success: false, error: error.message }; mainWindow,
} sendToRenderer,
}); geminiSessionRef,
);
}
});
ipcMain.on('update-keybinds', (event, newKeybinds) => { // Debug logging from renderer
if (mainWindow) { ipcMain.on("log-message", (event, msg) => {
// Also save to storage console.log(msg);
storage.setKeybinds(newKeybinds); });
updateGlobalShortcuts(newKeybinds, mainWindow, sendToRenderer, geminiSessionRef);
}
});
// Debug logging from renderer
ipcMain.on('log-message', (event, msg) => {
console.log(msg);
});
} }

View File

@ -1,569 +1,574 @@
const fs = require('fs'); const fs = require("fs");
const path = require('path'); const path = require("path");
const os = require('os'); const os = require("os");
const CONFIG_VERSION = 1; const CONFIG_VERSION = 1;
// Default values // Default values
const DEFAULT_CONFIG = { const DEFAULT_CONFIG = {
configVersion: CONFIG_VERSION, configVersion: CONFIG_VERSION,
onboarded: false, onboarded: false,
layout: 'normal', layout: "normal",
}; };
const DEFAULT_CREDENTIALS = { const DEFAULT_CREDENTIALS = {
apiKey: '', apiKey: "",
// OpenAI Realtime API settings groqApiKey: "",
openaiApiKey: '', openaiCompatibleApiKey: "",
openaiBaseUrl: '', openaiCompatibleBaseUrl: "",
openaiModel: 'gpt-4o-realtime-preview-2024-12-17', openaiCompatibleModel: "",
// OpenAI SDK settings (for BotHub and other providers)
openaiSdkApiKey: '',
openaiSdkBaseUrl: '',
openaiSdkModel: 'gpt-4o',
openaiSdkVisionModel: 'gpt-4o',
openaiSdkWhisperModel: 'whisper-1',
}; };
const DEFAULT_PREFERENCES = { const DEFAULT_PREFERENCES = {
customPrompt: '', customPrompt: "",
selectedProfile: 'interview', selectedProfile: "interview",
selectedLanguage: 'en-US', selectedLanguage: "en-US",
selectedScreenshotInterval: '5', selectedScreenshotInterval: "5",
selectedImageQuality: 'medium', selectedImageQuality: "medium",
advancedMode: false, advancedMode: false,
audioMode: 'speaker_only', audioMode: "speaker_only",
audioInputMode: 'auto', fontSize: "medium",
fontSize: 'medium', backgroundTransparency: 0.8,
backgroundTransparency: 0.8, googleSearchEnabled: false,
googleSearchEnabled: false, responseProvider: "gemini",
aiProvider: 'gemini', ollamaHost: "http://127.0.0.1:11434",
ollamaModel: "llama3.1",
whisperModel: "Xenova/whisper-small",
whisperDevice: "", // '' = auto-detect, 'cpu' = native, 'wasm' = compatible
}; };
const DEFAULT_KEYBINDS = null; // null means use system defaults const DEFAULT_KEYBINDS = null; // null means use system defaults
const DEFAULT_LIMITS = { const DEFAULT_LIMITS = {
data: [], // Array of { date: 'YYYY-MM-DD', flash: { count: 0 }, flashLite: { count: 0 } } data: [], // Array of { date: 'YYYY-MM-DD', flash: { count }, flashLite: { count }, groq: { 'qwen3-32b': { chars, limit }, 'gpt-oss-120b': { chars, limit }, 'gpt-oss-20b': { chars, limit } }, gemini: { 'gemma-3-27b-it': { chars } } }
}; };
// Get the config directory path based on OS // Get the config directory path based on OS
function getConfigDir() { function getConfigDir() {
const platform = os.platform(); const platform = os.platform();
let configDir; let configDir;
if (platform === 'win32') { if (platform === "win32") {
configDir = path.join(os.homedir(), 'AppData', 'Roaming', 'mastermind-config'); configDir = path.join(
} else if (platform === 'darwin') { os.homedir(),
configDir = path.join(os.homedir(), 'Library', 'Application Support', 'mastermind-config'); "AppData",
} else { "Roaming",
configDir = path.join(os.homedir(), '.config', 'mastermind-config'); "cheating-daddy-config",
} );
} else if (platform === "darwin") {
configDir = path.join(
os.homedir(),
"Library",
"Application Support",
"cheating-daddy-config",
);
} else {
configDir = path.join(os.homedir(), ".config", "cheating-daddy-config");
}
return configDir; return configDir;
}
// Get the old config directory path for migration
function getOldConfigDir() {
const platform = os.platform();
let configDir;
if (platform === 'win32') {
configDir = path.join(os.homedir(), 'AppData', 'Roaming', 'cheating-daddy-config');
} else if (platform === 'darwin') {
configDir = path.join(os.homedir(), 'Library', 'Application Support', 'cheating-daddy-config');
} else {
configDir = path.join(os.homedir(), '.config', 'cheating-daddy-config');
}
return configDir;
}
// Check if old config directory exists
function hasOldConfig() {
const oldDir = getOldConfigDir();
return fs.existsSync(oldDir);
}
// Migrate config from old directory to new directory if needed
function migrateFromOldConfig() {
const oldDir = getOldConfigDir();
const newDir = getConfigDir();
if (!fs.existsSync(oldDir)) {
console.log('No old config found to migrate');
return false;
}
if (fs.existsSync(newDir)) {
// NOTE: Does not matter if the new config directory already exists, we will overwrite it with the old config
fs.rmSync(newDir, { recursive: true, force: true });
console.log('New config directory already exists, overwriting with old config');
}
console.log(`Migrating config from ${oldDir} to ${newDir}...`);
try {
const parentDir = path.dirname(newDir);
if (!fs.existsSync(parentDir)) {
fs.mkdirSync(parentDir, { recursive: true });
}
fs.renameSync(oldDir, newDir);
console.log('Migration successful');
return true;
} catch (error) {
console.error('Migration failed:', error.message);
return false;
}
} }
// File paths // File paths
function getConfigPath() { function getConfigPath() {
return path.join(getConfigDir(), 'config.json'); return path.join(getConfigDir(), "config.json");
} }
function getCredentialsPath() { function getCredentialsPath() {
return path.join(getConfigDir(), 'credentials.json'); return path.join(getConfigDir(), "credentials.json");
} }
function getPreferencesPath() { function getPreferencesPath() {
return path.join(getConfigDir(), 'preferences.json'); return path.join(getConfigDir(), "preferences.json");
} }
function getKeybindsPath() { function getKeybindsPath() {
return path.join(getConfigDir(), 'keybinds.json'); return path.join(getConfigDir(), "keybinds.json");
} }
function getLimitsPath() { function getLimitsPath() {
return path.join(getConfigDir(), 'limits.json'); return path.join(getConfigDir(), "limits.json");
} }
function getHistoryDir() { function getHistoryDir() {
return path.join(getConfigDir(), 'history'); return path.join(getConfigDir(), "history");
} }
// Helper to read JSON file safely // Helper to read JSON file safely
function readJsonFile(filePath, defaultValue) { function readJsonFile(filePath, defaultValue) {
try { try {
if (fs.existsSync(filePath)) { if (fs.existsSync(filePath)) {
const data = fs.readFileSync(filePath, 'utf8'); const data = fs.readFileSync(filePath, "utf8");
return JSON.parse(data); return JSON.parse(data);
}
} catch (error) {
console.warn(`Error reading ${filePath}:`, error.message);
} }
return defaultValue; } catch (error) {
console.warn(`Error reading ${filePath}:`, error.message);
}
return defaultValue;
} }
// Helper to write JSON file safely // Helper to write JSON file safely
function writeJsonFile(filePath, data) { function writeJsonFile(filePath, data) {
try { try {
const dir = path.dirname(filePath); const dir = path.dirname(filePath);
if (!fs.existsSync(dir)) { if (!fs.existsSync(dir)) {
fs.mkdirSync(dir, { recursive: true }); fs.mkdirSync(dir, { recursive: true });
}
fs.writeFileSync(filePath, JSON.stringify(data, null, 2), 'utf8');
return true;
} catch (error) {
console.error(`Error writing ${filePath}:`, error.message);
return false;
} }
fs.writeFileSync(filePath, JSON.stringify(data, null, 2), "utf8");
return true;
} catch (error) {
console.error(`Error writing ${filePath}:`, error.message);
return false;
}
} }
// Check if we need to reset (no configVersion or wrong version) // Check if we need to reset (no configVersion or wrong version)
function needsReset() { function needsReset() {
const configPath = getConfigPath(); const configPath = getConfigPath();
if (!fs.existsSync(configPath)) { if (!fs.existsSync(configPath)) {
return true; return true;
} }
try { try {
const config = JSON.parse(fs.readFileSync(configPath, 'utf8')); const config = JSON.parse(fs.readFileSync(configPath, "utf8"));
return !config.configVersion || config.configVersion !== CONFIG_VERSION; return !config.configVersion || config.configVersion !== CONFIG_VERSION;
} catch { } catch {
return true; return true;
} }
} }
// Wipe and reinitialize the config directory // Wipe and reinitialize the config directory
function resetConfigDir() { function resetConfigDir() {
const configDir = getConfigDir(); const configDir = getConfigDir();
console.log('Resetting config directory...'); console.log("Resetting config directory...");
// Remove existing directory if it exists // Remove existing directory if it exists
if (fs.existsSync(configDir)) { if (fs.existsSync(configDir)) {
fs.rmSync(configDir, { recursive: true, force: true }); fs.rmSync(configDir, { recursive: true, force: true });
} }
// Create fresh directory structure // Create fresh directory structure
fs.mkdirSync(configDir, { recursive: true }); fs.mkdirSync(configDir, { recursive: true });
fs.mkdirSync(getHistoryDir(), { recursive: true }); fs.mkdirSync(getHistoryDir(), { recursive: true });
// Initialize with defaults // Initialize with defaults
writeJsonFile(getConfigPath(), DEFAULT_CONFIG); writeJsonFile(getConfigPath(), DEFAULT_CONFIG);
writeJsonFile(getCredentialsPath(), DEFAULT_CREDENTIALS); writeJsonFile(getCredentialsPath(), DEFAULT_CREDENTIALS);
writeJsonFile(getPreferencesPath(), DEFAULT_PREFERENCES); writeJsonFile(getPreferencesPath(), DEFAULT_PREFERENCES);
console.log('Config directory initialized with defaults'); console.log("Config directory initialized with defaults");
} }
// Initialize storage - call this on app startup // Initialize storage - call this on app startup
function initializeStorage() { function initializeStorage() {
if (needsReset()) { if (needsReset()) {
resetConfigDir(); resetConfigDir();
} else { } else {
// Ensure history directory exists // Ensure history directory exists
const historyDir = getHistoryDir(); const historyDir = getHistoryDir();
if (!fs.existsSync(historyDir)) { if (!fs.existsSync(historyDir)) {
fs.mkdirSync(historyDir, { recursive: true }); fs.mkdirSync(historyDir, { recursive: true });
}
} }
}
} }
// ============ CONFIG ============ // ============ CONFIG ============
function getConfig() { function getConfig() {
return readJsonFile(getConfigPath(), DEFAULT_CONFIG); return readJsonFile(getConfigPath(), DEFAULT_CONFIG);
} }
function setConfig(config) { function setConfig(config) {
const current = getConfig(); const current = getConfig();
const updated = { ...current, ...config, configVersion: CONFIG_VERSION }; const updated = { ...current, ...config, configVersion: CONFIG_VERSION };
return writeJsonFile(getConfigPath(), updated); return writeJsonFile(getConfigPath(), updated);
} }
function updateConfig(key, value) { function updateConfig(key, value) {
const config = getConfig(); const config = getConfig();
config[key] = value; config[key] = value;
return writeJsonFile(getConfigPath(), config); return writeJsonFile(getConfigPath(), config);
} }
// ============ CREDENTIALS ============ // ============ CREDENTIALS ============
function getCredentials() { function getCredentials() {
return readJsonFile(getCredentialsPath(), DEFAULT_CREDENTIALS); return readJsonFile(getCredentialsPath(), DEFAULT_CREDENTIALS);
} }
function setCredentials(credentials) { function setCredentials(credentials) {
const current = getCredentials(); const current = getCredentials();
const updated = { ...current, ...credentials }; const updated = { ...current, ...credentials };
return writeJsonFile(getCredentialsPath(), updated); return writeJsonFile(getCredentialsPath(), updated);
} }
function getApiKey() { function getApiKey() {
return getCredentials().apiKey || ''; return getCredentials().apiKey || "";
} }
function setApiKey(apiKey) { function setApiKey(apiKey) {
return setCredentials({ apiKey }); return setCredentials({ apiKey });
} }
function getOpenAICredentials() { function getGroqApiKey() {
const creds = getCredentials(); return getCredentials().groqApiKey || "";
return {
apiKey: creds.openaiApiKey || '',
baseUrl: creds.openaiBaseUrl || '',
model: creds.openaiModel || 'gpt-4o-realtime-preview-2024-12-17',
};
} }
function setOpenAICredentials(config) { function setGroqApiKey(groqApiKey) {
const updates = {}; return setCredentials({ groqApiKey });
if (config.apiKey !== undefined) updates.openaiApiKey = config.apiKey;
if (config.baseUrl !== undefined) updates.openaiBaseUrl = config.baseUrl;
if (config.model !== undefined) updates.openaiModel = config.model;
return setCredentials(updates);
} }
function getOpenAISDKCredentials() { function getOpenAICompatibleConfig() {
const creds = getCredentials(); const creds = getCredentials();
return { return {
apiKey: creds.openaiSdkApiKey || '', apiKey: creds.openaiCompatibleApiKey || "",
baseUrl: creds.openaiSdkBaseUrl || '', baseUrl: creds.openaiCompatibleBaseUrl || "",
model: creds.openaiSdkModel || 'gpt-4o', model: creds.openaiCompatibleModel || "",
visionModel: creds.openaiSdkVisionModel || 'gpt-4o', };
whisperModel: creds.openaiSdkWhisperModel || 'whisper-1',
};
} }
function setOpenAISDKCredentials(config) { function setOpenAICompatibleConfig(apiKey, baseUrl, model) {
const updates = {}; return setCredentials({
if (config.apiKey !== undefined) updates.openaiSdkApiKey = config.apiKey; openaiCompatibleApiKey: apiKey,
if (config.baseUrl !== undefined) updates.openaiSdkBaseUrl = config.baseUrl; openaiCompatibleBaseUrl: baseUrl,
if (config.model !== undefined) updates.openaiSdkModel = config.model; openaiCompatibleModel: model,
if (config.visionModel !== undefined) updates.openaiSdkVisionModel = config.visionModel; });
if (config.whisperModel !== undefined) updates.openaiSdkWhisperModel = config.whisperModel;
return setCredentials(updates);
} }
// ============ PREFERENCES ============ // ============ PREFERENCES ============
function getPreferences() { function getPreferences() {
const saved = readJsonFile(getPreferencesPath(), {}); const saved = readJsonFile(getPreferencesPath(), {});
return { ...DEFAULT_PREFERENCES, ...saved }; return { ...DEFAULT_PREFERENCES, ...saved };
} }
function setPreferences(preferences) { function setPreferences(preferences) {
const current = getPreferences(); const current = getPreferences();
const updated = { ...current, ...preferences }; const updated = { ...current, ...preferences };
return writeJsonFile(getPreferencesPath(), updated); return writeJsonFile(getPreferencesPath(), updated);
} }
function updatePreference(key, value) { function updatePreference(key, value) {
const preferences = getPreferences(); const preferences = getPreferences();
preferences[key] = value; preferences[key] = value;
return writeJsonFile(getPreferencesPath(), preferences); return writeJsonFile(getPreferencesPath(), preferences);
} }
// ============ KEYBINDS ============ // ============ KEYBINDS ============
function getKeybinds() { function getKeybinds() {
return readJsonFile(getKeybindsPath(), DEFAULT_KEYBINDS); return readJsonFile(getKeybindsPath(), DEFAULT_KEYBINDS);
} }
function setKeybinds(keybinds) { function setKeybinds(keybinds) {
return writeJsonFile(getKeybindsPath(), keybinds); return writeJsonFile(getKeybindsPath(), keybinds);
} }
// ============ LIMITS (Rate Limiting) ============ // ============ LIMITS (Rate Limiting) ============
function getLimits() { function getLimits() {
return readJsonFile(getLimitsPath(), DEFAULT_LIMITS); return readJsonFile(getLimitsPath(), DEFAULT_LIMITS);
} }
function setLimits(limits) { function setLimits(limits) {
return writeJsonFile(getLimitsPath(), limits); return writeJsonFile(getLimitsPath(), limits);
} }
function getTodayDateString() { function getTodayDateString() {
const now = new Date(); const now = new Date();
return now.toISOString().split('T')[0]; // YYYY-MM-DD return now.toISOString().split("T")[0]; // YYYY-MM-DD
} }
function getTodayLimits() { function getTodayLimits() {
const limits = getLimits(); const limits = getLimits();
const today = getTodayDateString(); const today = getTodayDateString();
// Find today's entry // Find today's entry
const todayEntry = limits.data.find(entry => entry.date === today); const todayEntry = limits.data.find((entry) => entry.date === today);
if (todayEntry) { if (todayEntry) {
return todayEntry; // ensure new fields exist
if (!todayEntry.groq) {
todayEntry.groq = {
"qwen3-32b": { chars: 0, limit: 1500000 },
"gpt-oss-120b": { chars: 0, limit: 600000 },
"gpt-oss-20b": { chars: 0, limit: 600000 },
"kimi-k2-instruct": { chars: 0, limit: 600000 },
};
}
if (!todayEntry.gemini) {
todayEntry.gemini = {
"gemma-3-27b-it": { chars: 0 },
};
} }
// No entry for today - clean old entries and create new one
limits.data = limits.data.filter(entry => entry.date === today);
const newEntry = {
date: today,
flash: { count: 0 },
flashLite: { count: 0 },
};
limits.data.push(newEntry);
setLimits(limits); setLimits(limits);
return todayEntry;
}
return newEntry; // No entry for today - clean old entries and create new one
limits.data = limits.data.filter((entry) => entry.date === today);
const newEntry = {
date: today,
flash: { count: 0 },
flashLite: { count: 0 },
groq: {
"qwen3-32b": { chars: 0, limit: 1500000 },
"gpt-oss-120b": { chars: 0, limit: 600000 },
"gpt-oss-20b": { chars: 0, limit: 600000 },
"kimi-k2-instruct": { chars: 0, limit: 600000 },
},
gemini: {
"gemma-3-27b-it": { chars: 0 },
},
};
limits.data.push(newEntry);
setLimits(limits);
return newEntry;
} }
function incrementLimitCount(model) { function incrementLimitCount(model) {
const limits = getLimits(); const limits = getLimits();
const today = getTodayDateString(); const today = getTodayDateString();
// Find or create today's entry // Find or create today's entry
let todayEntry = limits.data.find(entry => entry.date === today); let todayEntry = limits.data.find((entry) => entry.date === today);
if (!todayEntry) { if (!todayEntry) {
// Clean old entries and create new one // Clean old entries and create new one
limits.data = []; limits.data = [];
todayEntry = { todayEntry = {
date: today, date: today,
flash: { count: 0 }, flash: { count: 0 },
flashLite: { count: 0 }, flashLite: { count: 0 },
}; };
limits.data.push(todayEntry); limits.data.push(todayEntry);
} else { } else {
// Clean old entries, keep only today // Clean old entries, keep only today
limits.data = limits.data.filter(entry => entry.date === today); limits.data = limits.data.filter((entry) => entry.date === today);
} }
// Increment the appropriate model count // Increment the appropriate model count
if (model === 'gemini-2.5-flash') { if (model === "gemini-2.5-flash") {
todayEntry.flash.count++; todayEntry.flash.count++;
} else if (model === 'gemini-2.5-flash-lite') { } else if (model === "gemini-2.5-flash-lite") {
todayEntry.flashLite.count++; todayEntry.flashLite.count++;
} }
setLimits(limits);
return todayEntry;
}
function incrementCharUsage(provider, model, charCount) {
getTodayLimits();
const limits = getLimits();
const today = getTodayDateString();
const todayEntry = limits.data.find((entry) => entry.date === today);
if (todayEntry[provider] && todayEntry[provider][model]) {
todayEntry[provider][model].chars += charCount;
setLimits(limits); setLimits(limits);
return todayEntry; }
return todayEntry;
} }
function getAvailableModel() { function getAvailableModel() {
const todayLimits = getTodayLimits(); const todayLimits = getTodayLimits();
// RPD limits: flash = 20, flash-lite = 20 // RPD limits: flash = 20, flash-lite = 20
// After both exhausted, fall back to flash (for paid API users) // After both exhausted, fall back to flash (for paid API users)
if (todayLimits.flash.count < 20) { if (todayLimits.flash.count < 20) {
return 'gemini-2.5-flash'; return "gemini-2.5-flash";
} else if (todayLimits.flashLite.count < 20) { } else if (todayLimits.flashLite.count < 20) {
return 'gemini-2.5-flash-lite'; return "gemini-2.5-flash-lite";
} }
return 'gemini-2.5-flash'; // Default to flash for paid API users return "gemini-2.5-flash"; // Default to flash for paid API users
}
function getModelForToday() {
const todayEntry = getTodayLimits();
const groq = todayEntry.groq;
if (groq["qwen3-32b"].chars < groq["qwen3-32b"].limit) {
return "qwen/qwen3-32b";
}
if (groq["gpt-oss-120b"].chars < groq["gpt-oss-120b"].limit) {
return "openai/gpt-oss-120b";
}
if (groq["gpt-oss-20b"].chars < groq["gpt-oss-20b"].limit) {
return "openai/gpt-oss-20b";
}
if (groq["kimi-k2-instruct"].chars < groq["kimi-k2-instruct"].limit) {
return "moonshotai/kimi-k2-instruct";
}
// All limits exhausted
return null;
} }
// ============ HISTORY ============ // ============ HISTORY ============
function getSessionPath(sessionId) { function getSessionPath(sessionId) {
return path.join(getHistoryDir(), `${sessionId}.json`); return path.join(getHistoryDir(), `${sessionId}.json`);
} }
function saveSession(sessionId, data) { function saveSession(sessionId, data) {
const sessionPath = getSessionPath(sessionId); const sessionPath = getSessionPath(sessionId);
// Load existing session to preserve metadata // Load existing session to preserve metadata
const existingSession = readJsonFile(sessionPath, null); const existingSession = readJsonFile(sessionPath, null);
const sessionData = { const sessionData = {
sessionId, sessionId,
createdAt: existingSession?.createdAt || parseInt(sessionId), createdAt: existingSession?.createdAt || parseInt(sessionId),
lastUpdated: Date.now(), lastUpdated: Date.now(),
// Profile context - set once when session starts // Profile context - set once when session starts
profile: data.profile || existingSession?.profile || null, profile: data.profile || existingSession?.profile || null,
customPrompt: data.customPrompt || existingSession?.customPrompt || null, customPrompt: data.customPrompt || existingSession?.customPrompt || null,
// Conversation data // Conversation data
conversationHistory: data.conversationHistory || existingSession?.conversationHistory || [], conversationHistory:
screenAnalysisHistory: data.screenAnalysisHistory || existingSession?.screenAnalysisHistory || [], data.conversationHistory || existingSession?.conversationHistory || [],
}; screenAnalysisHistory:
return writeJsonFile(sessionPath, sessionData); data.screenAnalysisHistory ||
existingSession?.screenAnalysisHistory ||
[],
};
return writeJsonFile(sessionPath, sessionData);
} }
function getSession(sessionId) { function getSession(sessionId) {
return readJsonFile(getSessionPath(sessionId), null); return readJsonFile(getSessionPath(sessionId), null);
} }
function getAllSessions() { function getAllSessions() {
const historyDir = getHistoryDir(); const historyDir = getHistoryDir();
try { try {
if (!fs.existsSync(historyDir)) { if (!fs.existsSync(historyDir)) {
return []; return [];
}
const files = fs
.readdirSync(historyDir)
.filter(f => f.endsWith('.json'))
.sort((a, b) => {
// Sort by timestamp descending (newest first)
const tsA = parseInt(a.replace('.json', ''));
const tsB = parseInt(b.replace('.json', ''));
return tsB - tsA;
});
return files
.map(file => {
const sessionId = file.replace('.json', '');
const data = readJsonFile(path.join(historyDir, file), null);
if (data) {
return {
sessionId,
createdAt: data.createdAt,
lastUpdated: data.lastUpdated,
messageCount: data.conversationHistory?.length || 0,
screenAnalysisCount: data.screenAnalysisHistory?.length || 0,
profile: data.profile || null,
customPrompt: data.customPrompt || null,
};
}
return null;
})
.filter(Boolean);
} catch (error) {
console.error('Error reading sessions:', error.message);
return [];
} }
const files = fs
.readdirSync(historyDir)
.filter((f) => f.endsWith(".json"))
.sort((a, b) => {
// Sort by timestamp descending (newest first)
const tsA = parseInt(a.replace(".json", ""));
const tsB = parseInt(b.replace(".json", ""));
return tsB - tsA;
});
return files
.map((file) => {
const sessionId = file.replace(".json", "");
const data = readJsonFile(path.join(historyDir, file), null);
if (data) {
return {
sessionId,
createdAt: data.createdAt,
lastUpdated: data.lastUpdated,
messageCount: data.conversationHistory?.length || 0,
screenAnalysisCount: data.screenAnalysisHistory?.length || 0,
profile: data.profile || null,
customPrompt: data.customPrompt || null,
};
}
return null;
})
.filter(Boolean);
} catch (error) {
console.error("Error reading sessions:", error.message);
return [];
}
} }
function deleteSession(sessionId) { function deleteSession(sessionId) {
const sessionPath = getSessionPath(sessionId); const sessionPath = getSessionPath(sessionId);
try { try {
if (fs.existsSync(sessionPath)) { if (fs.existsSync(sessionPath)) {
fs.unlinkSync(sessionPath); fs.unlinkSync(sessionPath);
return true; return true;
}
} catch (error) {
console.error('Error deleting session:', error.message);
} }
return false; } catch (error) {
console.error("Error deleting session:", error.message);
}
return false;
} }
function deleteAllSessions() { function deleteAllSessions() {
const historyDir = getHistoryDir(); const historyDir = getHistoryDir();
try { try {
if (fs.existsSync(historyDir)) { if (fs.existsSync(historyDir)) {
const files = fs.readdirSync(historyDir).filter(f => f.endsWith('.json')); const files = fs
files.forEach(file => { .readdirSync(historyDir)
fs.unlinkSync(path.join(historyDir, file)); .filter((f) => f.endsWith(".json"));
}); files.forEach((file) => {
} fs.unlinkSync(path.join(historyDir, file));
return true; });
} catch (error) {
console.error('Error deleting all sessions:', error.message);
return false;
} }
return true;
} catch (error) {
console.error("Error deleting all sessions:", error.message);
return false;
}
} }
// ============ CLEAR ALL DATA ============ // ============ CLEAR ALL DATA ============
function clearAllData() { function clearAllData() {
resetConfigDir(); resetConfigDir();
return true; return true;
} }
module.exports = { module.exports = {
// Initialization // Initialization
initializeStorage, initializeStorage,
getConfigDir, getConfigDir,
// Migration // Config
hasOldConfig, getConfig,
migrateFromOldConfig, setConfig,
updateConfig,
// Config // Credentials
getConfig, getCredentials,
setConfig, setCredentials,
updateConfig, getApiKey,
setApiKey,
getGroqApiKey,
setGroqApiKey,
getOpenAICompatibleConfig,
setOpenAICompatibleConfig,
// Credentials // Preferences
getCredentials, getPreferences,
setCredentials, setPreferences,
getApiKey, updatePreference,
setApiKey,
getOpenAICredentials,
setOpenAICredentials,
getOpenAISDKCredentials,
setOpenAISDKCredentials,
// Preferences // Keybinds
getPreferences, getKeybinds,
setPreferences, setKeybinds,
updatePreference,
// Keybinds // Limits (Rate Limiting)
getKeybinds, getLimits,
setKeybinds, setLimits,
getTodayLimits,
incrementLimitCount,
getAvailableModel,
incrementCharUsage,
getModelForToday,
// Limits (Rate Limiting) // History
getLimits, saveSession,
setLimits, getSession,
getTodayLimits, getAllSessions,
incrementLimitCount, deleteSession,
getAvailableModel, deleteAllSessions,
// History // Clear all
saveSession, clearAllData,
getSession,
getAllSessions,
deleteSession,
deleteAllSessions,
// Clear all
clearAllData,
}; };

View File

@ -1,464 +0,0 @@
const { BrowserWindow, ipcMain } = require('electron');
const { getSystemPrompt } = require('./prompts');
const { getAvailableModel, incrementLimitCount, getApiKey, getOpenAICredentials, getOpenAISDKCredentials, getPreferences } = require('../storage');
// Import provider implementations
const geminiProvider = require('./gemini');
const openaiRealtimeProvider = require('./openai-realtime');
const openaiSdkProvider = require('./openai-sdk');
// Conversation tracking (shared across providers)
let currentSessionId = null;
let conversationHistory = [];
let screenAnalysisHistory = [];
let currentProfile = null;
let currentCustomPrompt = null;
let currentProvider = 'gemini'; // 'gemini', 'openai-realtime', or 'openai-sdk'
let providerConfig = {};
function sendToRenderer(channel, data) {
const windows = BrowserWindow.getAllWindows();
if (windows.length > 0) {
windows[0].webContents.send(channel, data);
}
}
function initializeNewSession(profile = null, customPrompt = null) {
currentSessionId = Date.now().toString();
conversationHistory = [];
screenAnalysisHistory = [];
currentProfile = profile;
currentCustomPrompt = customPrompt;
console.log('New conversation session started:', currentSessionId, 'profile:', profile, 'provider:', currentProvider);
if (profile) {
sendToRenderer('save-session-context', {
sessionId: currentSessionId,
profile: profile,
customPrompt: customPrompt || '',
provider: currentProvider,
});
}
}
function saveConversationTurn(transcription, aiResponse) {
if (!currentSessionId) {
initializeNewSession();
}
const conversationTurn = {
timestamp: Date.now(),
transcription: transcription.trim(),
ai_response: aiResponse.trim(),
};
conversationHistory.push(conversationTurn);
console.log('Saved conversation turn:', conversationTurn);
sendToRenderer('save-conversation-turn', {
sessionId: currentSessionId,
turn: conversationTurn,
fullHistory: conversationHistory,
});
}
function saveScreenAnalysis(prompt, response, model) {
if (!currentSessionId) {
initializeNewSession();
}
const analysisEntry = {
timestamp: Date.now(),
prompt: prompt,
response: response.trim(),
model: model,
provider: currentProvider,
};
screenAnalysisHistory.push(analysisEntry);
console.log('Saved screen analysis:', analysisEntry);
sendToRenderer('save-screen-analysis', {
sessionId: currentSessionId,
analysis: analysisEntry,
fullHistory: screenAnalysisHistory,
profile: currentProfile,
customPrompt: currentCustomPrompt,
});
}
function getCurrentSessionData() {
return {
sessionId: currentSessionId,
history: conversationHistory,
provider: currentProvider,
};
}
// Get provider configuration from storage
async function getStoredSetting(key, defaultValue) {
try {
const windows = BrowserWindow.getAllWindows();
if (windows.length > 0) {
await new Promise(resolve => setTimeout(resolve, 100));
const value = await windows[0].webContents.executeJavaScript(`
(function() {
try {
if (typeof localStorage === 'undefined') {
return '${defaultValue}';
}
const stored = localStorage.getItem('${key}');
return stored || '${defaultValue}';
} catch (e) {
return '${defaultValue}';
}
})()
`);
return value;
}
} catch (error) {
console.error('Error getting stored setting for', key, ':', error.message);
}
return defaultValue;
}
// Initialize AI session based on selected provider
/**
 * Create a new AI session for the provider chosen in preferences
 * ('openai-realtime', 'openai-sdk', or 'gemini' by default).
 *
 * @param {string} [customPrompt=''] - Extra instructions appended to the system prompt.
 * @param {string} [profile='interview'] - Prompt profile name.
 * @param {string} [language='en-US'] - BCP-47 language tag for speech.
 * @returns {Promise<boolean>} true when the session is ready, false on any failure.
 */
async function initializeAISession(customPrompt = '', profile = 'interview', language = 'en-US') {
    // Read provider from file-based storage (preferences.json)
    const prefs = getPreferences();
    const provider = prefs.aiProvider || 'gemini';
    currentProvider = provider;
    console.log('Initializing AI session with provider:', provider);
    // Check if Google Search is enabled for system prompt
    const googleSearchEnabled = prefs.googleSearchEnabled ?? true;
    const systemPrompt = getSystemPrompt(profile, customPrompt, googleSearchEnabled);
    if (provider === 'openai-realtime') {
        // Get OpenAI Realtime configuration
        const creds = getOpenAICredentials();
        if (!creds.apiKey) {
            sendToRenderer('update-status', 'OpenAI API key not configured');
            return false;
        }
        providerConfig = {
            apiKey: creds.apiKey,
            baseUrl: creds.baseUrl || null,
            model: creds.model,
            systemPrompt,
            language,
            isReconnect: false,
        };
        // Start a fresh conversation session before connecting.
        initializeNewSession(profile, customPrompt);
        try {
            await openaiRealtimeProvider.initializeOpenAISession(providerConfig, conversationHistory);
            return true;
        } catch (error) {
            console.error('Failed to initialize OpenAI Realtime session:', error);
            sendToRenderer('update-status', 'Failed to connect to OpenAI Realtime');
            return false;
        }
    } else if (provider === 'openai-sdk') {
        // Get OpenAI SDK configuration (for BotHub, etc.)
        const creds = getOpenAISDKCredentials();
        if (!creds.apiKey) {
            sendToRenderer('update-status', 'OpenAI SDK API key not configured');
            return false;
        }
        providerConfig = {
            apiKey: creds.apiKey,
            baseUrl: creds.baseUrl || null,
            model: creds.model,
            visionModel: creds.visionModel,
            whisperModel: creds.whisperModel,
        };
        initializeNewSession(profile, customPrompt);
        try {
            await openaiSdkProvider.initializeOpenAISDK(providerConfig);
            openaiSdkProvider.setSystemPrompt(systemPrompt);
            // Apply the user's push-to-talk preference ('auto' when unset).
            openaiSdkProvider.updatePushToTalkSettings(prefs.audioInputMode || 'auto');
            sendToRenderer('update-status', 'Ready (OpenAI SDK)');
            return true;
        } catch (error) {
            console.error('Failed to initialize OpenAI SDK:', error);
            sendToRenderer('update-status', 'Failed to initialize OpenAI SDK: ' + error.message);
            return false;
        }
    } else {
        // Use Gemini (default)
        const apiKey = getApiKey();
        if (!apiKey) {
            sendToRenderer('update-status', 'Gemini API key not configured');
            return false;
        }
        const session = await geminiProvider.initializeGeminiSession(apiKey, customPrompt, profile, language);
        // The live session handle is shared via global.geminiSessionRef so the
        // audio/text senders below can reach it.
        if (session && global.geminiSessionRef) {
            global.geminiSessionRef.current = session;
            return true;
        }
        return false;
    }
}
// Send audio to appropriate provider
/**
 * Route one audio chunk to whichever provider is currently active.
 *
 * @param {string} data - Base64-encoded audio payload.
 * @param {string} mimeType - MIME type of the audio data.
 * @param {boolean} [isSystemAudio=true] - true for system audio, false for mic.
 * @returns {Promise<{success: boolean, error?: string}>}
 */
async function sendAudioContent(data, mimeType, isSystemAudio = true) {
    switch (currentProvider) {
        case 'openai-realtime':
            return await openaiRealtimeProvider.sendAudioToOpenAI(data);
        case 'openai-sdk':
            // The SDK provider buffers chunks and transcribes on flush.
            return await openaiSdkProvider.processAudioChunk(data, mimeType);
        default: {
            // Gemini live session.
            const session = global.geminiSessionRef?.current;
            if (!session) {
                return { success: false, error: 'No active Gemini session' };
            }
            try {
                // Progress marker on stdout: '.' = system audio, ',' = microphone.
                process.stdout.write(isSystemAudio ? '.' : ',');
                await session.sendRealtimeInput({
                    audio: { data, mimeType },
                });
                return { success: true };
            } catch (error) {
                console.error('Error sending audio to Gemini:', error);
                return { success: false, error: error.message };
            }
        }
    }
}
// Send image to appropriate provider
/**
 * Forward a screenshot to the active provider for analysis. Successful
 * OpenAI results are also recorded in the screen-analysis history; Gemini
 * records its own history inside sendImageToGeminiHttp.
 */
async function sendImageContent(data, prompt) {
    if (currentProvider === 'openai-realtime') {
        const creds = getOpenAICredentials();
        const opts = {
            apiKey: creds.apiKey,
            baseUrl: creds.baseUrl,
            model: creds.model,
        };
        const result = await openaiRealtimeProvider.sendImageToOpenAI(data, prompt, opts);
        if (result.success) {
            saveScreenAnalysis(prompt, result.text, result.model);
        }
        return result;
    }
    if (currentProvider === 'openai-sdk') {
        const result = await openaiSdkProvider.sendImageMessage(data, prompt);
        if (result.success) {
            saveScreenAnalysis(prompt, result.text, result.model);
        }
        return result;
    }
    // Gemini HTTP API — screen analysis is saved inside sendImageToGeminiHttp.
    return await geminiProvider.sendImageToGeminiHttp(data, prompt);
}
// Send text message to appropriate provider
/**
 * Deliver a typed user message to the active provider. For the SDK provider
 * the completed exchange is also saved as a conversation turn.
 */
async function sendTextMessage(text) {
    if (currentProvider === 'openai-realtime') {
        return await openaiRealtimeProvider.sendTextToOpenAI(text);
    }
    if (currentProvider === 'openai-sdk') {
        const result = await openaiSdkProvider.sendTextMessage(text);
        if (result.success && result.text) {
            saveConversationTurn(text, result.text);
        }
        return result;
    }
    // Gemini live session.
    if (!global.geminiSessionRef?.current) {
        return { success: false, error: 'No active Gemini session' };
    }
    try {
        console.log('Sending text message to Gemini:', text);
        await global.geminiSessionRef.current.sendRealtimeInput({ text: text.trim() });
        return { success: true };
    } catch (error) {
        console.error('Error sending text to Gemini:', error);
        return { success: false, error: error.message };
    }
}
// Close session for appropriate provider
/**
 * Tear down the active provider session. For Gemini this also stops macOS
 * system-audio capture and clears the shared session reference.
 */
async function closeSession() {
    try {
        switch (currentProvider) {
            case 'openai-realtime':
                openaiRealtimeProvider.closeOpenAISession();
                break;
            case 'openai-sdk':
                openaiSdkProvider.closeOpenAISDK();
                break;
            default: {
                geminiProvider.stopMacOSAudioCapture();
                const sessionRef = global.geminiSessionRef;
                if (sessionRef?.current) {
                    await sessionRef.current.close();
                    sessionRef.current = null;
                }
            }
        }
        return { success: true };
    } catch (error) {
        console.error('Error closing session:', error);
        return { success: false, error: error.message };
    }
}
// Setup IPC handlers
/**
 * Register all AI-provider IPC handlers on ipcMain and store the shared
 * Gemini session ref on `global`. Intended to be called once at startup.
 *
 * @param {{current: object|null}} geminiSessionRef - Mutable ref that holds
 *   the live Gemini session so the sender functions can reach it.
 */
function setupAIProviderIpcHandlers(geminiSessionRef) {
    // Store reference for Gemini
    global.geminiSessionRef = geminiSessionRef;
    // Listen for conversation turn save requests from providers
    ipcMain.on('save-conversation-turn-data', (event, { transcription, response }) => {
        saveConversationTurn(transcription, response);
    });
    // Push-to-talk only applies to the OpenAI SDK provider.
    ipcMain.on('push-to-talk-toggle', () => {
        if (currentProvider === 'openai-sdk') {
            openaiSdkProvider.togglePushToTalk();
        }
    });
    ipcMain.on('update-push-to-talk-settings', (event, { inputMode } = {}) => {
        openaiSdkProvider.updatePushToTalkSettings(inputMode || 'auto');
    });
    // Session lifecycle and content routing — thin wrappers over the
    // module-level functions above.
    ipcMain.handle('initialize-ai-session', async (event, customPrompt, profile, language) => {
        return await initializeAISession(customPrompt, profile, language);
    });
    ipcMain.handle('send-audio-content', async (event, { data, mimeType }) => {
        return await sendAudioContent(data, mimeType, true);
    });
    ipcMain.handle('send-mic-audio-content', async (event, { data, mimeType }) => {
        return await sendAudioContent(data, mimeType, false);
    });
    ipcMain.handle('send-image-content', async (event, { data, prompt }) => {
        return await sendImageContent(data, prompt);
    });
    ipcMain.handle('send-text-message', async (event, text) => {
        return await sendTextMessage(text);
    });
    ipcMain.handle('close-session', async event => {
        return await closeSession();
    });
    // macOS system audio
    ipcMain.handle('start-macos-audio', async event => {
        if (process.platform !== 'darwin') {
            return {
                success: false,
                error: 'macOS audio capture only available on macOS',
            };
        }
        try {
            if (currentProvider === 'gemini') {
                const success = await geminiProvider.startMacOSAudioCapture(global.geminiSessionRef);
                return { success };
            } else if (currentProvider === 'openai-sdk') {
                const success = await openaiSdkProvider.startMacOSAudioCapture();
                return { success };
            } else if (currentProvider === 'openai-realtime') {
                // OpenAI Realtime uses WebSocket, handle differently if needed
                return {
                    success: false,
                    error: 'OpenAI Realtime uses WebSocket for audio',
                };
            }
            return {
                success: false,
                error: 'Unknown provider: ' + currentProvider,
            };
        } catch (error) {
            console.error('Error starting macOS audio capture:', error);
            return { success: false, error: error.message };
        }
    });
    ipcMain.handle('stop-macos-audio', async event => {
        try {
            if (currentProvider === 'gemini') {
                geminiProvider.stopMacOSAudioCapture();
            } else if (currentProvider === 'openai-sdk') {
                openaiSdkProvider.stopMacOSAudioCapture();
            }
            return { success: true };
        } catch (error) {
            console.error('Error stopping macOS audio capture:', error);
            return { success: false, error: error.message };
        }
    });
    // Session management
    ipcMain.handle('get-current-session', async event => {
        try {
            return { success: true, data: getCurrentSessionData() };
        } catch (error) {
            console.error('Error getting current session:', error);
            return { success: false, error: error.message };
        }
    });
    ipcMain.handle('start-new-session', async event => {
        try {
            initializeNewSession();
            return { success: true, sessionId: currentSessionId };
        } catch (error) {
            console.error('Error starting new session:', error);
            return { success: false, error: error.message };
        }
    });
    // NOTE(review): this handler only logs — the setting itself is read from
    // preferences in initializeAISession; confirm no persistence is expected here.
    ipcMain.handle('update-google-search-setting', async (event, enabled) => {
        try {
            console.log('Google Search setting updated to:', enabled);
            return { success: true };
        } catch (error) {
            console.error('Error updating Google Search setting:', error);
            return { success: false, error: error.message };
        }
    });
    // Provider switching
    // Only flips the routing flag; it does not tear down or create sessions.
    ipcMain.handle('switch-ai-provider', async (event, provider) => {
        try {
            console.log('Switching AI provider to:', provider);
            currentProvider = provider;
            return { success: true };
        } catch (error) {
            console.error('Error switching provider:', error);
            return { success: false, error: error.message };
        }
    });
}
// Public API: IPC wiring plus the provider-routing helpers used directly
// by the main process.
module.exports = {
    setupAIProviderIpcHandlers,
    initializeAISession,
    sendAudioContent,
    sendImageContent,
    sendTextMessage,
    closeSession,
    getCurrentSessionData,
    initializeNewSession,
    saveConversationTurn,
};

File diff suppressed because it is too large Load Diff

854
src/utils/localai.js Normal file
View File

@ -0,0 +1,854 @@
const { Ollama } = require("ollama");
const { getSystemPrompt } = require("./prompts");
const {
sendToRenderer,
initializeNewSession,
saveConversationTurn,
} = require("./gemini");
const { fork } = require("child_process");
const path = require("path");
const { getSystemNode } = require("./nodeDetect");
// ── State ──
// Ollama chat client + model name for the active local session.
let ollamaClient = null;
let ollamaModel = null;
// Child process running Whisper inference (see spawnWhisperWorker).
let whisperWorker = null;
let isWhisperLoading = false;
let whisperReady = false;
// Chat turns ({role, content}) sent to Ollama; capped at 20 entries elsewhere.
let localConversationHistory = [];
let currentSystemPrompt = null;
let isLocalActive = false;
// Set when we intentionally kill the worker to suppress crash handling
let whisperShuttingDown = false;
// Pending transcription callback (one at a time)
let pendingTranscribe = null;
// VAD state
let isSpeaking = false;
let speechBuffers = [];
let silenceFrameCount = 0;
let speechFrameCount = 0;
// VAD configuration
// Each mode tunes: the RMS energy threshold that counts as "voice", the
// consecutive voiced frames needed to start capture, and the consecutive
// silent frames needed to end it.
const VAD_MODES = {
  NORMAL: {
    energyThreshold: 0.01,
    speechFramesRequired: 3,
    silenceFramesRequired: 30,
  },
  LOW_BITRATE: {
    energyThreshold: 0.008,
    speechFramesRequired: 4,
    silenceFramesRequired: 35,
  },
  AGGRESSIVE: {
    energyThreshold: 0.015,
    speechFramesRequired: 2,
    silenceFramesRequired: 20,
  },
  VERY_AGGRESSIVE: {
    energyThreshold: 0.02,
    speechFramesRequired: 2,
    silenceFramesRequired: 15,
  },
};
let vadConfig = VAD_MODES.VERY_AGGRESSIVE;
// Maximum speech buffer size: ~30 seconds at 16kHz, 16-bit mono
const MAX_SPEECH_BUFFER_BYTES = 16000 * 2 * 30; // 960,000 bytes
// Audio resampling buffer
// Leftover bytes from the previous 24k→16k resample call (partial frames).
let resampleRemainder = Buffer.alloc(0);
// ── Audio Resampling (24kHz → 16kHz) ──
/**
 * Downsample 16-bit LE mono PCM from 24 kHz to 16 kHz via linear
 * interpolation (2 output samples per 3 input samples). Input bytes that
 * cannot be consumed this call are carried over in resampleRemainder so
 * chunk boundaries do not drop samples.
 *
 * @param {Buffer} inputBuffer - 16-bit LE mono PCM at 24 kHz.
 * @returns {Buffer} 16-bit LE mono PCM at 16 kHz.
 */
function resample24kTo16k(inputBuffer) {
  // Combine with any leftover samples from previous call
  const combined = Buffer.concat([resampleRemainder, inputBuffer]);
  const inputSamples = Math.floor(combined.length / 2); // 16-bit = 2 bytes per sample
  // Ratio: 16000/24000 = 2/3, so for every 3 input samples we produce 2 output samples
  const outputSamples = Math.floor((inputSamples * 2) / 3);
  const outputBuffer = Buffer.alloc(outputSamples * 2);
  for (let i = 0; i < outputSamples; i++) {
    // Map output sample index to input position
    const srcPos = (i * 3) / 2;
    const srcIndex = Math.floor(srcPos);
    const frac = srcPos - srcIndex;
    const s0 = combined.readInt16LE(srcIndex * 2);
    // Interpolate with the next sample; clamp to s0 at the buffer edge.
    const s1 =
      srcIndex + 1 < inputSamples
        ? combined.readInt16LE((srcIndex + 1) * 2)
        : s0;
    const interpolated = Math.round(s0 + frac * (s1 - s0));
    // Clamp to the signed 16-bit range before writing.
    outputBuffer.writeInt16LE(
      Math.max(-32768, Math.min(32767, interpolated)),
      i * 2,
    );
  }
  // Store remainder for next call
  const consumedInputSamples = Math.ceil((outputSamples * 3) / 2);
  const remainderStart = consumedInputSamples * 2;
  resampleRemainder =
    remainderStart < combined.length
      ? combined.slice(remainderStart)
      : Buffer.alloc(0);
  return outputBuffer;
}
// ── VAD (Voice Activity Detection) ──
/**
 * Root-mean-square amplitude of a 16-bit LE mono PCM buffer, normalized to
 * [0, 1]. Returns 0 for an empty buffer.
 *
 * @param {Buffer} pcm16Buffer - 16-bit LE mono PCM samples.
 * @returns {number} RMS amplitude in [0, 1].
 */
function calculateRMS(pcm16Buffer) {
  const sampleCount = Math.floor(pcm16Buffer.length / 2);
  if (sampleCount === 0) return 0;
  let energy = 0;
  for (let index = 0; index < sampleCount; index++) {
    const normalized = pcm16Buffer.readInt16LE(index * 2) / 32768;
    energy += normalized * normalized;
  }
  return Math.sqrt(energy / sampleCount);
}
/**
 * Energy-based voice activity detection over one 16 kHz PCM frame.
 * Tracks speech/silence streaks against vadConfig, accumulates voiced audio
 * into speechBuffers, and on end-of-speech (or the ~30 s safety cap) hands
 * the collected buffer to handleSpeechEnd() for transcription.
 */
function processVAD(pcm16kBuffer) {
  const rms = calculateRMS(pcm16kBuffer);
  const isVoice = rms > vadConfig.energyThreshold;
  if (isVoice) {
    speechFrameCount++;
    silenceFrameCount = 0;
    // Debounce: require N consecutive voiced frames before starting capture.
    if (!isSpeaking && speechFrameCount >= vadConfig.speechFramesRequired) {
      isSpeaking = true;
      speechBuffers = [];
      console.log("[LocalAI] Speech started (RMS:", rms.toFixed(4), ")");
      sendToRenderer("update-status", "Listening... (speech detected)");
    }
  } else {
    silenceFrameCount++;
    speechFrameCount = 0;
    // Debounce the other way: N consecutive silent frames end the utterance.
    if (isSpeaking && silenceFrameCount >= vadConfig.silenceFramesRequired) {
      isSpeaking = false;
      console.log(
        "[LocalAI] Speech ended, accumulated",
        speechBuffers.length,
        "chunks",
      );
      sendToRenderer("update-status", "Transcribing...");
      // Trigger transcription with accumulated audio
      const audioData = Buffer.concat(speechBuffers);
      speechBuffers = [];
      // Fire-and-forget with an explicit catch so VAD keeps running.
      handleSpeechEnd(audioData).catch((err) => {
        console.error("[LocalAI] handleSpeechEnd crashed:", err);
        sendToRenderer(
          "update-status",
          "Transcription error: " + (err?.message || "unknown"),
        );
      });
      return;
    }
  }
  // Accumulate audio during speech
  if (isSpeaking) {
    speechBuffers.push(Buffer.from(pcm16kBuffer));
    // Cap buffer at ~30 seconds to prevent OOM and ONNX tensor overflow
    const totalBytes = speechBuffers.reduce((sum, b) => sum + b.length, 0);
    if (totalBytes >= MAX_SPEECH_BUFFER_BYTES) {
      isSpeaking = false;
      console.log(
        "[LocalAI] Speech buffer limit reached (" +
          totalBytes +
          " bytes), forcing transcription",
      );
      sendToRenderer("update-status", "Transcribing (max length reached)...");
      const audioData = Buffer.concat(speechBuffers);
      speechBuffers = [];
      silenceFrameCount = 0;
      speechFrameCount = 0;
      handleSpeechEnd(audioData).catch((err) => {
        console.error("[LocalAI] handleSpeechEnd crashed:", err);
        sendToRenderer(
          "update-status",
          "Transcription error: " + (err?.message || "unknown"),
        );
      });
    }
  }
}
// ── Whisper Worker (isolated child process) ──
/**
 * Spawn (at most one) child process that runs Whisper inference, choosing
 * the safest available runtime, and wire up logging, message routing, and
 * crash/respawn handling. No-op when a worker already exists.
 */
function spawnWhisperWorker() {
  if (whisperWorker) return;
  const workerPath = path.join(__dirname, "whisperWorker.js");
  console.log("[LocalAI] Spawning Whisper worker:", workerPath);
  // Determine the best way to spawn the worker:
  // 1. System Node.js (preferred) — native addons were compiled against this
  // ABI, so onnxruntime-node works without SIGTRAP / ABI mismatches.
  // 2. Electron utilityProcess (packaged builds) — proper Node.js child
  // process API that doesn't require the RunAsNode fuse.
  // 3. ELECTRON_RUN_AS_NODE (last resort, dev only) — the old approach that
  // only works when the RunAsNode fuse isn't flipped.
  const systemNode = getSystemNode();
  if (systemNode) {
    // Spawn with system Node.js — onnxruntime-node native binary matches ABI
    console.log("[LocalAI] Using system Node.js:", systemNode.nodePath);
    whisperWorker = fork(workerPath, [], {
      stdio: ["pipe", "pipe", "pipe", "ipc"],
      execPath: systemNode.nodePath,
      env: {
        ...process.env,
        // Unset ELECTRON_RUN_AS_NODE so the system node doesn't inherit it
        ELECTRON_RUN_AS_NODE: undefined,
      },
    });
  } else {
    // No system Node.js found — try utilityProcess (Electron >= 22)
    // utilityProcess.fork() creates a proper child Node.js process without
    // needing the RunAsNode fuse. Falls back to ELECTRON_RUN_AS_NODE for
    // dev mode where fuses aren't applied.
    try {
      const { utilityProcess: UP } = require("electron");
      if (UP && typeof UP.fork === "function") {
        console.log("[LocalAI] Using Electron utilityProcess");
        const up = UP.fork(workerPath);
        // Wrap utilityProcess to look like a ChildProcess for the rest of localai.js
        whisperWorker = wrapUtilityProcess(up);
        // Early return: the wrapper wires its own stdout/stderr handlers.
        return;
      }
    } catch (_) {
      // utilityProcess not available (older Electron or renderer context)
    }
    console.warn(
      "[LocalAI] No system Node.js — falling back to ELECTRON_RUN_AS_NODE (WASM backend will be used)",
    );
    whisperWorker = fork(workerPath, [], {
      stdio: ["pipe", "pipe", "pipe", "ipc"],
      env: { ...process.env, ELECTRON_RUN_AS_NODE: "1" },
    });
  }
  // Mirror the worker's console output into the main-process log.
  whisperWorker.stdout.on("data", (data) => {
    console.log("[WhisperWorker stdout]", data.toString().trim());
  });
  whisperWorker.stderr.on("data", (data) => {
    console.error("[WhisperWorker stderr]", data.toString().trim());
  });
  // Route typed worker messages to their handlers / renderer events.
  whisperWorker.on("message", (msg) => {
    switch (msg.type) {
      case "ready":
        console.log("[LocalAI] Whisper worker ready");
        break;
      case "load-result":
        handleWorkerLoadResult(msg);
        break;
      case "transcribe-result":
        handleWorkerTranscribeResult(msg);
        break;
      case "status":
        sendToRenderer("update-status", msg.message);
        break;
      case "progress":
        // Model download progress — forwarded to the UI download indicator.
        sendToRenderer("whisper-progress", {
          file: msg.file,
          progress: msg.progress,
          loaded: msg.loaded,
          total: msg.total,
          status: msg.status,
        });
        break;
    }
  });
  whisperWorker.on("exit", (code, signal) => {
    console.error(
      "[LocalAI] Whisper worker exited — code:",
      code,
      "signal:",
      signal,
    );
    whisperWorker = null;
    whisperReady = false;
    // If we intentionally shut down, don't treat as crash
    if (whisperShuttingDown) {
      whisperShuttingDown = false;
      return;
    }
    // Reject any pending transcription
    if (pendingTranscribe) {
      pendingTranscribe.reject(
        new Error(
          "Whisper worker crashed (code: " + code + ", signal: " + signal + ")",
        ),
      );
      pendingTranscribe = null;
    }
    // If session is still active, inform the user and respawn
    if (isLocalActive) {
      sendToRenderer(
        "update-status",
        "Whisper crashed (signal: " +
          (signal || code) +
          "). Respawning worker...",
      );
      // Delay so a crash loop doesn't spin at full speed.
      setTimeout(() => {
        if (isLocalActive) {
          respawnWhisperWorker();
        }
      }, 2000);
    }
  });
  whisperWorker.on("error", (err) => {
    console.error("[LocalAI] Whisper worker error:", err);
    whisperWorker = null;
    whisperReady = false;
  });
}
/**
 * Wrap Electron's utilityProcess to behave like a ChildProcess (duck-typing)
 * so the rest of localai.js can use the same API.
 */
function wrapUtilityProcess(up) {
  const EventEmitter = require("events");
  const { Readable } = require("stream");
  const wrapper = new EventEmitter();
  // Re-emit worker messages on the wrapper.
  up.on("message", (payload) => wrapper.emit("message", payload));
  // ChildProcess 'exit' carries (code, signal); utilityProcess has no signal.
  up.on("exit", (exitCode) => wrapper.emit("exit", exitCode, null));
  // Stub streams — utilityProcess pipes its output to the parent console.
  wrapper.stdout = new Readable({ read() {} });
  wrapper.stderr = new Readable({ read() {} });
  wrapper.send = (message) => up.postMessage(message);
  wrapper.kill = (signal) => up.kill();
  wrapper.removeAllListeners = () => {
    up.removeAllListeners();
    EventEmitter.prototype.removeAllListeners.call(wrapper);
  };
  // Forward anything pushed into the stub streams (kept for parity with the
  // real ChildProcess wiring in spawnWhisperWorker).
  wrapper.stdout.on("data", (chunk) => {
    console.log("[WhisperWorker stdout]", chunk.toString().trim());
  });
  wrapper.stderr.on("data", (chunk) => {
    console.error("[WhisperWorker stderr]", chunk.toString().trim());
  });
  return wrapper;
}
// Pending model-load resolver (one at a time); settled in handleWorkerLoadResult.
let pendingLoad = null;
/**
 * Process the worker's 'load-result' message. On success flips whisperReady;
 * on failure surfaces the error to the renderer. Either way clears the
 * loading flag and resolves the pendingLoad promise with a boolean (it never
 * rejects, so callers only need a truthiness check).
 */
function handleWorkerLoadResult(msg) {
  if (msg.success) {
    console.log(
      "[LocalAI] Whisper model loaded successfully (in worker, device:",
      msg.device || "unknown",
      ")",
    );
    whisperReady = true;
    sendToRenderer("whisper-downloading", false);
    isWhisperLoading = false;
    if (pendingLoad) {
      pendingLoad.resolve(true);
      pendingLoad = null;
    }
  } else {
    console.error("[LocalAI] Whisper worker failed to load model:", msg.error);
    sendToRenderer("whisper-downloading", false);
    sendToRenderer(
      "update-status",
      "Failed to load Whisper model: " + msg.error,
    );
    isWhisperLoading = false;
    if (pendingLoad) {
      pendingLoad.resolve(false);
      pendingLoad = null;
    }
  }
}
/**
 * Settle the pending transcription promise with the worker's result.
 * Failures resolve to null — callers treat null as "nothing transcribed".
 */
function handleWorkerTranscribeResult(msg) {
  const pending = pendingTranscribe;
  if (!pending) return;
  pendingTranscribe = null;
  if (msg.success) {
    console.log("[LocalAI] Transcription:", msg.text);
    pending.resolve(msg.text || null);
  } else {
    console.error("[LocalAI] Worker transcription error:", msg.error);
    pending.resolve(null);
  }
}
/**
 * Restart the Whisper worker after a crash: kill any remnant, spawn a fresh
 * process, and immediately re-issue the model load using the configured
 * model/cache/device so transcription can resume.
 * Note: does not set pendingLoad — the load result only flips whisperReady.
 */
function respawnWhisperWorker() {
  killWhisperWorker();
  spawnWhisperWorker();
  const { app } = require("electron");
  const cacheDir = path.join(app.getPath("userData"), "whisper-models");
  const modelName =
    require("../storage").getPreferences().whisperModel ||
    "Xenova/whisper-small";
  sendToRenderer("whisper-downloading", true);
  isWhisperLoading = true;
  const device = resolveWhisperDevice();
  whisperWorker.send({ type: "load", modelName, cacheDir, device });
}
/**
 * Determine which ONNX backend to use for Whisper inference.
 * - "cpu"  — onnxruntime-node (fast, native — requires a matching ABI)
 * - "wasm" — onnxruntime-web (slower but universally compatible)
 *
 * When spawned with system Node.js, native CPU backend is safe.
 * Otherwise default to WASM to prevent native crashes.
 * An explicit `whisperDevice` preference always wins over auto-detection.
 */
function resolveWhisperDevice() {
  const prefs = require("../storage").getPreferences();
  if (prefs.whisperDevice) return prefs.whisperDevice;
  // Auto-detect: if we're running with system Node.js, native is safe
  const systemNode = getSystemNode();
  return systemNode ? "cpu" : "wasm";
}
/**
 * Map the app's BCP-47 language tag (e.g. "en-US", "ru-RU") to the
 * ISO 639-1 code that Whisper expects (e.g. "en", "ru").
 * Returns "auto" when the user selected auto-detect, which tells the
 * worker to let Whisper detect the language itself.
 */
function resolveWhisperLanguage() {
  const { selectedLanguage } = require("../storage").getPreferences();
  const tag = selectedLanguage || "en-US";
  if (tag === "auto") return "auto";
  // The primary BCP-47 subtag is the ISO 639 code. Whisper has no separate
  // Mandarin ("cmn") / Cantonese ("yue") codes — both map onto "zh".
  const primary = tag.split("-")[0].toLowerCase();
  if (primary === "cmn" || primary === "yue") return "zh";
  return primary;
}
/**
 * Terminate the Whisper worker (if any) and reset readiness flags.
 * Sets whisperShuttingDown first so the 'exit' handler treats the exit as
 * intentional rather than a crash.
 */
function killWhisperWorker() {
  if (!whisperWorker) return;
  whisperShuttingDown = true;
  try {
    whisperWorker.removeAllListeners();
    whisperWorker.kill();
  } catch (_) {
    // Worker already dead — nothing to clean up.
  }
  whisperWorker = null;
  whisperReady = false;
}
/**
 * Ensure the Whisper model is loaded in the worker (spawning it if needed).
 * Resolves true when ready, false on load failure, and null when another
 * load is already in flight (callers treat null as "not ready").
 *
 * @param {string} modelName - Transformers.js model id, e.g. "Xenova/whisper-small".
 * @returns {Promise<boolean|null>}
 */
async function loadWhisperPipeline(modelName) {
  if (whisperReady) return true;
  if (isWhisperLoading) return null;
  isWhisperLoading = true;
  console.log("[LocalAI] Loading Whisper model via worker:", modelName);
  sendToRenderer("whisper-downloading", true);
  sendToRenderer(
    "update-status",
    "Loading Whisper model (first time may take a while)...",
  );
  spawnWhisperWorker();
  // Models are cached under userData so the download happens only once.
  const { app } = require("electron");
  const cacheDir = path.join(app.getPath("userData"), "whisper-models");
  const device = resolveWhisperDevice();
  console.log("[LocalAI] Whisper device:", device);
  // Resolution happens in handleWorkerLoadResult via pendingLoad.
  return new Promise((resolve) => {
    pendingLoad = { resolve };
    whisperWorker.send({ type: "load", modelName, cacheDir, device });
  });
}
/**
 * Ask the Whisper worker to transcribe a 16 kHz 16-bit mono PCM buffer.
 * Resolves with the transcript text, or null on invalid input, send
 * failure, worker error, or a 60 s timeout. Rejects only when the worker
 * crashes mid-request (see the 'exit' handler in spawnWhisperWorker).
 *
 * @param {Buffer} pcm16kBuffer - 16-bit LE mono PCM at 16 kHz.
 * @returns {Promise<string|null>}
 */
async function transcribeAudio(pcm16kBuffer) {
  if (!whisperReady || !whisperWorker) {
    console.error("[LocalAI] Whisper worker not ready");
    return null;
  }
  if (!pcm16kBuffer || pcm16kBuffer.length < 2) {
    console.error("[LocalAI] Invalid audio buffer:", pcm16kBuffer?.length);
    return null;
  }
  console.log(
    "[LocalAI] Starting transcription, audio length:",
    pcm16kBuffer.length,
    "bytes",
  );
  // Send audio to worker as base64 (IPC serialization)
  const audioBase64 = pcm16kBuffer.toString("base64");
  return new Promise((resolve, reject) => {
    let entry;
    // Timeout: if worker takes > 60s, assume it's stuck.
    const timeout = setTimeout(() => {
      console.error("[LocalAI] Transcription timed out after 60s");
      // Only clear the shared slot when it still belongs to THIS request —
      // clearing unconditionally could orphan a newer pending transcription.
      if (pendingTranscribe === entry) {
        pendingTranscribe = null;
      }
      // Safe even if already settled: resolve on a settled promise is a no-op.
      resolve(null);
    }, 60000);
    entry = {
      resolve: (val) => {
        clearTimeout(timeout);
        resolve(val);
      },
      reject: (err) => {
        clearTimeout(timeout);
        reject(err);
      },
    };
    pendingTranscribe = entry;
    try {
      whisperWorker.send({
        type: "transcribe",
        audioBase64,
        language: resolveWhisperLanguage(),
      });
    } catch (err) {
      clearTimeout(timeout);
      if (pendingTranscribe === entry) {
        pendingTranscribe = null;
      }
      console.error("[LocalAI] Failed to send to worker:", err);
      resolve(null);
    }
  });
}
// ── Speech End Handler ──
/**
 * Called when VAD detects end of speech: transcribe the captured audio and,
 * when a usable transcript comes back, request an Ollama response.
 */
async function handleSpeechEnd(audioData) {
  if (!isLocalActive) return;
  // Skip clips shorter than ~0.5 s (16 kHz, 16-bit mono = 32000 B/s).
  if (audioData.length < 16000) {
    console.log("[LocalAI] Audio too short, skipping");
    sendToRenderer("update-status", "Listening...");
    return;
  }
  console.log("[LocalAI] Processing audio:", audioData.length, "bytes");
  try {
    const transcription = await transcribeAudio(audioData);
    const cleaned = (transcription || "").trim();
    if (cleaned.length < 2) {
      console.log("[LocalAI] Empty transcription, skipping");
      sendToRenderer("update-status", "Listening...");
      return;
    }
    sendToRenderer("update-status", "Generating response...");
    await sendToOllama(transcription);
  } catch (error) {
    console.error("[LocalAI] handleSpeechEnd error:", error);
    sendToRenderer(
      "update-status",
      "Error: " + (error?.message || "transcription failed"),
    );
  }
}
// ── Ollama Chat ──
/**
 * Send a user utterance to Ollama and stream the reply to the renderer.
 * The first streamed token emits 'new-response'; subsequent tokens emit
 * 'update-response' with the accumulated text. A completed exchange is
 * appended to localConversationHistory and persisted via
 * saveConversationTurn(). Errors are reported through 'update-status'.
 */
async function sendToOllama(transcription) {
  if (!ollamaClient || !ollamaModel) {
    console.error("[LocalAI] Ollama not configured");
    return;
  }
  console.log(
    "[LocalAI] Sending to Ollama:",
    transcription.substring(0, 100) + "...",
  );
  localConversationHistory.push({
    role: "user",
    content: transcription.trim(),
  });
  // Keep history manageable
  if (localConversationHistory.length > 20) {
    localConversationHistory = localConversationHistory.slice(-20);
  }
  try {
    // System prompt first; history already includes the new user turn.
    const messages = [
      {
        role: "system",
        content: currentSystemPrompt || "You are a helpful assistant.",
      },
      ...localConversationHistory,
    ];
    const response = await ollamaClient.chat({
      model: ollamaModel,
      messages,
      stream: true,
    });
    let fullText = "";
    let isFirst = true;
    for await (const part of response) {
      const token = part.message?.content || "";
      if (token) {
        fullText += token;
        sendToRenderer(isFirst ? "new-response" : "update-response", fullText);
        isFirst = false;
      }
    }
    if (fullText.trim()) {
      localConversationHistory.push({
        role: "assistant",
        content: fullText.trim(),
      });
      saveConversationTurn(transcription, fullText);
    }
    console.log("[LocalAI] Ollama response completed");
    sendToRenderer("update-status", "Listening...");
  } catch (error) {
    console.error("[LocalAI] Ollama error:", error);
    sendToRenderer("update-status", "Ollama error: " + error.message);
  }
}
// ── Public API ──
/**
 * Bring up the full local pipeline: verify the Ollama server is reachable,
 * load the Whisper model in the worker, reset VAD/resampler/history state,
 * and start a new conversation session.
 *
 * @param {string} ollamaHost - Base URL of the Ollama server.
 * @param {string} model - Ollama chat model name.
 * @param {string} whisperModel - Transformers.js Whisper model id.
 * @param {string} profile - Prompt profile for getSystemPrompt().
 * @param {string} customPrompt - Extra user instructions for the prompt.
 * @returns {Promise<boolean>} true when everything is ready.
 */
async function initializeLocalSession(
  ollamaHost,
  model,
  whisperModel,
  profile,
  customPrompt,
) {
  console.log("[LocalAI] Initializing local session:", {
    ollamaHost,
    model,
    whisperModel,
    profile,
  });
  sendToRenderer("session-initializing", true);
  try {
    // Setup system prompt
    currentSystemPrompt = getSystemPrompt(profile, customPrompt, false);
    // Initialize Ollama client
    ollamaClient = new Ollama({ host: ollamaHost });
    ollamaModel = model;
    // Test Ollama connection
    try {
      await ollamaClient.list();
      console.log("[LocalAI] Ollama connection verified");
    } catch (error) {
      console.error(
        "[LocalAI] Cannot connect to Ollama at",
        ollamaHost,
        ":",
        error.message,
      );
      sendToRenderer("session-initializing", false);
      sendToRenderer(
        "update-status",
        "Cannot connect to Ollama at " + ollamaHost,
      );
      return false;
    }
    // Load Whisper model
    // null/false both mean "not ready" (in-flight load or failure).
    const pipeline = await loadWhisperPipeline(whisperModel);
    if (!pipeline) {
      sendToRenderer("session-initializing", false);
      return false;
    }
    // Reset VAD state
    isSpeaking = false;
    speechBuffers = [];
    silenceFrameCount = 0;
    speechFrameCount = 0;
    resampleRemainder = Buffer.alloc(0);
    localConversationHistory = [];
    // Initialize conversation session
    initializeNewSession(profile, customPrompt);
    isLocalActive = true;
    sendToRenderer("session-initializing", false);
    sendToRenderer("update-status", "Local AI ready - Listening...");
    console.log("[LocalAI] Session initialized successfully");
    return true;
  } catch (error) {
    console.error("[LocalAI] Initialization error:", error);
    sendToRenderer("session-initializing", false);
    sendToRenderer("update-status", "Local AI error: " + error.message);
    return false;
  }
}
/**
 * Ingest one 24 kHz mono PCM chunk from the capture pipeline: downsample to
 * 16 kHz and feed the result through voice-activity detection. No-op when no
 * local session is active.
 */
function processLocalAudio(monoChunk24k) {
  if (!isLocalActive) return;
  const pcm16k = resample24kTo16k(monoChunk24k);
  if (pcm16k.length === 0) return;
  processVAD(pcm16k);
}
/**
 * Deactivate the local session and reset all VAD, resampler, and
 * conversation state. The Whisper worker is deliberately left running so the
 * model doesn't need reloading next session; call killWhisperWorker() to
 * fully clean up.
 */
function closeLocalSession() {
  console.log("[LocalAI] Closing local session");
  isLocalActive = false;
  isSpeaking = false;
  silenceFrameCount = 0;
  speechFrameCount = 0;
  speechBuffers = [];
  resampleRemainder = Buffer.alloc(0);
  localConversationHistory = [];
  ollamaClient = null;
  ollamaModel = null;
  currentSystemPrompt = null;
}
/** @returns {boolean} Whether a local (Ollama + Whisper) session is active. */
function isLocalSessionActive() {
  return isLocalActive;
}
// ── Send text directly to Ollama (for manual text input) ──
/**
 * Route manually typed text to Ollama, bypassing transcription.
 * @returns {Promise<{success: boolean, error?: string}>}
 */
async function sendLocalText(text) {
  const ready = isLocalActive && ollamaClient;
  if (!ready) {
    return { success: false, error: "No active local session" };
  }
  try {
    await sendToOllama(text);
  } catch (error) {
    return { success: false, error: error.message };
  }
  return { success: true };
}
/**
 * Send a screenshot plus prompt to Ollama (vision) and stream the reply to
 * the renderer. Only the text prompt is kept in history, so images are not
 * re-sent on later turns.
 *
 * @param {string} base64Data - Base64-encoded image.
 * @param {string} prompt - User prompt accompanying the image.
 * @returns {Promise<{success: boolean, text?: string, model?: string, error?: string}>}
 */
async function sendLocalImage(base64Data, prompt) {
  if (!isLocalActive || !ollamaClient) {
    return { success: false, error: "No active local session" };
  }
  try {
    console.log("[LocalAI] Sending image to Ollama");
    sendToRenderer("update-status", "Analyzing image...");
    const userMessage = {
      role: "user",
      content: prompt,
      images: [base64Data],
    };
    // Store text-only version in history
    localConversationHistory.push({ role: "user", content: prompt });
    if (localConversationHistory.length > 20) {
      localConversationHistory = localConversationHistory.slice(-20);
    }
    // slice(0, -1) drops the just-pushed text turn; the image-bearing
    // message takes its place for this request only.
    const messages = [
      {
        role: "system",
        content: currentSystemPrompt || "You are a helpful assistant.",
      },
      ...localConversationHistory.slice(0, -1),
      userMessage,
    ];
    const response = await ollamaClient.chat({
      model: ollamaModel,
      messages,
      stream: true,
    });
    let fullText = "";
    let isFirst = true;
    for await (const part of response) {
      const token = part.message?.content || "";
      if (token) {
        fullText += token;
        sendToRenderer(isFirst ? "new-response" : "update-response", fullText);
        isFirst = false;
      }
    }
    if (fullText.trim()) {
      localConversationHistory.push({
        role: "assistant",
        content: fullText.trim(),
      });
      saveConversationTurn(prompt, fullText);
    }
    console.log("[LocalAI] Image response completed");
    sendToRenderer("update-status", "Listening...");
    return { success: true, text: fullText, model: ollamaModel };
  } catch (error) {
    console.error("[LocalAI] Image error:", error);
    sendToRenderer("update-status", "Ollama error: " + error.message);
    return { success: false, error: error.message };
  }
}
// Public API consumed by the IPC layer in the main process.
module.exports = {
  initializeLocalSession,
  processLocalAudio,
  closeLocalSession,
  isLocalSessionActive,
  sendLocalText,
  sendLocalImage,
};

View File

@ -1,99 +0,0 @@
const fs = require('fs');
const path = require('path');
const { app } = require('electron');
// Active write stream and cached path for today's log file.
let logFile = null;
let logPath = null;
/**
 * Resolve (and cache) the path of today's log file, creating the logs
 * directory under userData on first use. One file per calendar day.
 * @returns {string} Absolute path to the log file.
 */
function getLogPath() {
    if (logPath) return logPath;
    const userDataPath = app.getPath('userData');
    const logsDir = path.join(userDataPath, 'logs');
    // Create logs directory if it doesn't exist
    if (!fs.existsSync(logsDir)) {
        fs.mkdirSync(logsDir, { recursive: true });
    }
    // Create log file with timestamp
    // ISO date prefix, e.g. app-2026-02-16.log
    const timestamp = new Date().toISOString().split('T')[0];
    logPath = path.join(logsDir, `app-${timestamp}.log`);
    return logPath;
}
/**
 * Open the log file (append mode), write a startup banner, and monkey-patch
 * console.log/error/warn so all output is mirrored to the file.
 * @returns {string|null} The log file path, or null if setup failed.
 */
function initLogger() {
    try {
        const filePath = getLogPath();
        logFile = fs.createWriteStream(filePath, { flags: 'a' });
        const startMsg = `\n${'='.repeat(60)}\nApp started at ${new Date().toISOString()}\nPlatform: ${process.platform}, Arch: ${process.arch}\nElectron: ${process.versions.electron}, Node: ${process.versions.node}\nPackaged: ${app.isPackaged}\n${'='.repeat(60)}\n`;
        logFile.write(startMsg);
        // Override console methods to also write to file
        // Originals are captured so terminal output keeps working.
        const originalLog = console.log;
        const originalError = console.error;
        const originalWarn = console.warn;
        console.log = (...args) => {
            originalLog.apply(console, args);
            writeLog('INFO', args);
        };
        console.error = (...args) => {
            originalError.apply(console, args);
            writeLog('ERROR', args);
        };
        console.warn = (...args) => {
            originalWarn.apply(console, args);
            writeLog('WARN', args);
        };
        console.log('Logger initialized, writing to:', filePath);
        return filePath;
    } catch (err) {
        console.error('Failed to initialize logger:', err);
        return null;
    }
}
/**
 * Append one formatted line to the log file:
 * "[ISO-timestamp] [LEVEL] message". Objects are pretty-printed as JSON;
 * anything unserializable falls back to String(). No-ops when the logger is
 * uninitialized, and swallows write failures so logging can never crash the app.
 * @param {string} level - Severity tag (INFO / WARN / ERROR).
 * @param {Array<*>} args - Values passed to the console call.
 */
function writeLog(level, args) {
    if (!logFile) return;
    const render = (arg) => {
        if (typeof arg !== 'object') return String(arg);
        try {
            return JSON.stringify(arg, null, 2);
        } catch {
            return String(arg);
        }
    };
    try {
        const timestamp = new Date().toISOString();
        const message = args.map(render).join(' ');
        logFile.write(`[${timestamp}] [${level}] ${message}\n`);
    } catch (err) {
        // Deliberately swallowed — a failing log write must not crash the app.
    }
}
/**
 * Write a closing marker and release the log file stream.
 * Safe to call when the logger was never initialized.
 */
function closeLogger() {
    if (!logFile) return;
    logFile.write(`\nApp closed at ${new Date().toISOString()}\n`);
    logFile.end();
    logFile = null;
}
// Public logger API; writeLog stays module-private.
module.exports = {
    initLogger,
    closeLogger,
    getLogPath,
};

177
src/utils/nodeDetect.js Normal file
View File

@ -0,0 +1,177 @@
/**
* nodeDetect.js — Locate the system Node.js binary.
*
* When spawning child processes that rely on native addons compiled against the
* system Node.js ABI (e.g. onnxruntime-node), we must NOT run them inside
* Electron's embedded Node.js runtime — the ABI mismatch causes SIGTRAP /
* SIGSEGV crashes. This module finds the real system `node` binary so we can
* pass it as `execPath` to `child_process.fork()`.
*
* Falls back to `null` when no system Node.js is found, letting the caller
* decide on an alternative strategy (e.g. WASM backend).
*/
const { execSync } = require("child_process");
const fs = require("fs");
const path = require("path");
const os = require("os");
/** Well-known Node.js install locations per platform. */
// Mix of concrete binary paths and version-manager shim locations. The nvm
// entry is a versions *directory*, not a binary — it is resolved separately
// by findNvmNode() and skipped when findSystemNode() walks this table.
const KNOWN_PATHS = {
    darwin: [
        "/usr/local/bin/node",
        "/opt/homebrew/bin/node", // Apple Silicon Homebrew
        path.join(os.homedir(), ".nvm/versions/node"), // nvm — needs glob
        path.join(os.homedir(), ".volta/bin/node"), // Volta
        path.join(os.homedir(), ".fnm/aliases/default/bin/node"), // fnm
        path.join(os.homedir(), ".mise/shims/node"), // mise (rtx)
        path.join(os.homedir(), ".asdf/shims/node"), // asdf
    ],
    linux: [
        "/usr/bin/node",
        "/usr/local/bin/node",
        path.join(os.homedir(), ".nvm/versions/node"),
        path.join(os.homedir(), ".volta/bin/node"),
        path.join(os.homedir(), ".fnm/aliases/default/bin/node"),
        path.join(os.homedir(), ".mise/shims/node"),
        path.join(os.homedir(), ".asdf/shims/node"),
    ],
    win32: [
        "C:\\Program Files\\nodejs\\node.exe",
        "C:\\Program Files (x86)\\nodejs\\node.exe",
        path.join(os.homedir(), "AppData", "Roaming", "nvm", "current", "node.exe"),
        path.join(os.homedir(), ".volta", "bin", "node.exe"),
    ],
};
/**
 * Resolve the newest Node.js binary installed through nvm (macOS / Linux).
 * @returns {string|null} Path to the `node` binary, or null when nvm is
 *   absent, has no versions, or the expected binary is missing.
 */
function findNvmNode() {
    const versionsRoot = path.join(os.homedir(), ".nvm", "versions", "node");
    try {
        if (!fs.existsSync(versionsRoot)) return null;
        const installed = fs
            .readdirSync(versionsRoot)
            .filter((entry) => entry.startsWith("v"))
            // Numeric-aware descending sort so e.g. v20.10.0 outranks v9.x.
            .sort((a, b) => b.localeCompare(a, undefined, { numeric: true }));
        if (installed.length === 0) return null;
        const candidate = path.join(versionsRoot, installed[0], "bin", "node");
        return fs.existsSync(candidate) ? candidate : null;
    } catch (_) {
        // Treat any filesystem error as "not found".
        return null;
    }
}
/**
 * Resolve `node` through the system PATH using `which` (Unix) or `where`
 * (Windows), with common version-manager shim directories appended to PATH
 * so manager-installed binaries are also found.
 * @returns {string|null} First existing match, or null when lookup fails.
 */
function whichNode() {
    const isWindows = process.platform === "win32";
    // Augment PATH with well-known shim dirs for the lookup only.
    const searchPath = [
        process.env.PATH || "",
        "/usr/local/bin",
        "/opt/homebrew/bin",
        path.join(os.homedir(), ".volta", "bin"),
        path.join(os.homedir(), ".fnm", "aliases", "default", "bin"),
        path.join(os.homedir(), ".mise", "shims"),
        path.join(os.homedir(), ".asdf", "shims"),
    ].join(isWindows ? ";" : ":");
    try {
        const output = execSync(isWindows ? "where node" : "which node", {
            encoding: "utf8",
            timeout: 5000,
            env: { ...process.env, PATH: searchPath },
            stdio: ["ignore", "pipe", "ignore"],
        }).trim();
        // `where` on Windows may print several matches — keep the first line.
        const [firstLine] = output.split(/\r?\n/);
        const candidate = firstLine.trim();
        return candidate && fs.existsSync(candidate) ? candidate : null;
    } catch (_) {
        // Lookup command failed or timed out.
        return null;
    }
}
/**
 * Verify that `nodePath` points at a genuine Node.js binary rather than the
 * Electron binary masquerading as Node via ELECTRON_RUN_AS_NODE: the probe
 * runs the binary and checks that `process.versions.electron` is absent.
 * @param {string|null|undefined} nodePath - Candidate binary path.
 * @returns {boolean} true only when the probe prints "true".
 */
function isRealNode(nodePath) {
    if (!nodePath) return false;
    const probe = `"${nodePath}" -e "process.stdout.write(String(!process.versions.electron))"`;
    try {
        const output = execSync(probe, {
            encoding: "utf8",
            timeout: 5000,
            // Drop ELECTRON_RUN_AS_NODE so the probe reflects the real binary.
            env: { ...process.env, ELECTRON_RUN_AS_NODE: undefined },
            stdio: ["ignore", "pipe", "ignore"],
        });
        return output.trim() === "true";
    } catch (_) {
        // Missing binary, crash, or timeout — not a usable Node.
        return false;
    }
}
/**
 * Find the system Node.js binary.
 *
 * Search order: PATH lookup first (respects the user's shims), then nvm's
 * version directory, then the per-platform table of well-known install
 * locations. Every candidate is validated with isRealNode() before use.
 *
 * @returns {{ nodePath: string } | null} The absolute path to system `node`,
 *   or null if none found. The caller should fall back to WASM when null.
 */
function findSystemNode() {
    // 1 + 2. PATH lookup, then nvm's multi-version directory.
    for (const locate of [whichNode, findNvmNode]) {
        const found = locate();
        if (found && isRealNode(found)) {
            return { nodePath: found };
        }
    }
    // 3. Walk the well-known paths for the current platform.
    const candidates = KNOWN_PATHS[process.platform] || KNOWN_PATHS.linux;
    for (const candidate of candidates) {
        // The nvm root is a directory of versions — handled above, skip it.
        if (candidate.includes(".nvm/versions/node")) continue;
        if (fs.existsSync(candidate) && isRealNode(candidate)) {
            return { nodePath: candidate };
        }
    }
    return null;
}
/** Cache so we only search once per process lifetime. */
let _cached = undefined;
/**
 * Memoized wrapper around findSystemNode(); logs the outcome on first call.
 * @returns {{ nodePath: string } | null}
 */
function getSystemNode() {
    if (_cached !== undefined) {
        return _cached;
    }
    _cached = findSystemNode();
    if (_cached) {
        console.log("[nodeDetect] Found system Node.js:", _cached.nodePath);
    } else {
        console.warn(
            "[nodeDetect] No system Node.js found — will fall back to WASM backend",
        );
    }
    return _cached;
}
// Public API: getSystemNode is the cached entry point callers should prefer.
module.exports = { findSystemNode, getSystemNode, isRealNode };

View File

@ -1,404 +0,0 @@
const { BrowserWindow } = require('electron');
const WebSocket = require('ws');
// OpenAI Realtime API implementation
// Documentation: https://platform.openai.com/docs/api-reference/realtime
// Active Realtime WebSocket connection (null while disconnected).
let ws = null;
// Set when the user closes the session deliberately, to suppress reconnects.
let isUserClosing = false;
// Last session config, retained so reconnection can reuse it.
let sessionParams = null;
let reconnectAttempts = 0;
const MAX_RECONNECT_ATTEMPTS = 3;
const RECONNECT_DELAY = 2000; // ms to wait before each reconnect attempt
// Message buffer for accumulating responses
let messageBuffer = '';
// Accumulated user-speech transcription for the current turn.
let currentTranscription = '';
/**
 * Send an IPC message to the first application window, if one exists.
 * @param {string} channel - IPC channel name.
 * @param {*} data - Payload forwarded to the renderer.
 */
function sendToRenderer(channel, data) {
    const [mainWindow] = BrowserWindow.getAllWindows();
    if (mainWindow) {
        mainWindow.webContents.send(channel, data);
    }
}
/**
 * Build a context-restoration message from recent history so a reconnected
 * session can continue where the previous one left off.
 * @param {Array<{transcription?: string, ai_response?: string}>} conversationHistory
 * @returns {string|null} Recap of up to the last 20 complete turns, or null
 *   when no turn has both a transcription and a response.
 */
function buildContextMessage(conversationHistory) {
    const usableTurns = conversationHistory
        .slice(-20)
        .filter((turn) => turn.transcription?.trim() && turn.ai_response?.trim());
    if (usableTurns.length === 0) return null;
    const recap = usableTurns
        .map((turn) => `User: ${turn.transcription.trim()}\nAssistant: ${turn.ai_response.trim()}`)
        .join('\n\n');
    return `Session reconnected. Here's the conversation so far:\n\n${recap}\n\nContinue from here.`;
}
/**
 * Open a WebSocket session against the OpenAI Realtime API.
 *
 * Resolves with the connected WebSocket once the 'open' handshake completes
 * and the session.update config has been sent; rejects on connection errors.
 * Unexpected closes trigger automatic reconnection (up to
 * MAX_RECONNECT_ATTEMPTS) via attemptReconnect().
 *
 * @param {{apiKey: string, baseUrl?: string, systemPrompt?: string,
 *          model?: string, language?: string, isReconnect?: boolean}} config
 * @param {Array} [conversationHistory] - Prior turns, replayed on reconnect.
 * @returns {Promise<WebSocket>}
 */
async function initializeOpenAISession(config, conversationHistory = []) {
    // NOTE(review): `language` is destructured but not referenced below —
    // confirm whether it should feed the transcription config.
    const { apiKey, baseUrl, systemPrompt, model, language, isReconnect } = config;
    if (!isReconnect) {
        // Fresh session: remember the config so a later reconnect can reuse it.
        sessionParams = config;
        reconnectAttempts = 0;
        sendToRenderer('session-initializing', true);
    }
    // Use custom baseURL or default OpenAI endpoint
    const wsUrl = baseUrl || 'wss://api.openai.com/v1/realtime';
    const fullUrl = `${wsUrl}?model=${model || 'gpt-4o-realtime-preview-2024-12-17'}`;
    return new Promise((resolve, reject) => {
        try {
            ws = new WebSocket(fullUrl, {
                headers: {
                    Authorization: `Bearer ${apiKey}`,
                    'OpenAI-Beta': 'realtime=v1',
                },
            });
            ws.on('open', () => {
                console.log('OpenAI Realtime connection established');
                // Configure session: text+audio output, pcm16 both ways,
                // Whisper input transcription, and server-side VAD.
                const sessionConfig = {
                    type: 'session.update',
                    session: {
                        modalities: ['text', 'audio'],
                        instructions: systemPrompt,
                        voice: 'alloy',
                        input_audio_format: 'pcm16',
                        output_audio_format: 'pcm16',
                        input_audio_transcription: {
                            model: 'whisper-1',
                        },
                        turn_detection: {
                            type: 'server_vad',
                            threshold: 0.5,
                            prefix_padding_ms: 300,
                            silence_duration_ms: 500,
                        },
                        temperature: 0.8,
                        max_response_output_tokens: 4096,
                    },
                };
                ws.send(JSON.stringify(sessionConfig));
                // Restore context if reconnecting
                if (isReconnect && conversationHistory.length > 0) {
                    const contextMessage = buildContextMessage(conversationHistory);
                    if (contextMessage) {
                        ws.send(
                            JSON.stringify({
                                type: 'conversation.item.create',
                                item: {
                                    type: 'message',
                                    role: 'user',
                                    content: [{ type: 'input_text', text: contextMessage }],
                                },
                            })
                        );
                        ws.send(JSON.stringify({ type: 'response.create' }));
                    }
                }
                sendToRenderer('update-status', 'Connected to OpenAI');
                if (!isReconnect) {
                    sendToRenderer('session-initializing', false);
                }
                resolve(ws);
            });
            ws.on('message', data => {
                try {
                    const event = JSON.parse(data.toString());
                    handleOpenAIEvent(event);
                } catch (error) {
                    console.error('Error parsing OpenAI message:', error);
                }
            });
            ws.on('error', error => {
                console.error('OpenAI WebSocket error:', error);
                sendToRenderer('update-status', 'Error: ' + error.message);
                reject(error);
            });
            ws.on('close', (code, reason) => {
                console.log(`OpenAI WebSocket closed: ${code} - ${reason}`);
                if (isUserClosing) {
                    // Expected shutdown requested via closeOpenAISession().
                    isUserClosing = false;
                    sendToRenderer('update-status', 'Session closed');
                    return;
                }
                // Attempt reconnection
                if (sessionParams && reconnectAttempts < MAX_RECONNECT_ATTEMPTS) {
                    attemptReconnect(conversationHistory);
                } else {
                    sendToRenderer('update-status', 'Session closed');
                }
            });
        } catch (error) {
            console.error('Failed to initialize OpenAI session:', error);
            if (!isReconnect) {
                sendToRenderer('session-initializing', false);
            }
            reject(error);
        }
    });
}
/**
 * Dispatch a single server event from the Realtime API.
 *
 * Accumulates streamed model output into the module-level messageBuffer and
 * user speech into currentTranscription, forwarding UI updates to the
 * renderer. Relies on event-arrival order: both buffers are flushed and reset
 * on 'response.done' and reused for the next turn.
 * @param {{type: string}} event - Parsed Realtime API event object.
 */
function handleOpenAIEvent(event) {
    console.log('OpenAI event:', event.type);
    switch (event.type) {
        case 'session.created':
            console.log('Session created:', event.session.id);
            break;
        case 'session.updated':
            console.log('Session updated');
            sendToRenderer('update-status', 'Listening...');
            break;
        case 'input_audio_buffer.speech_started':
            console.log('Speech started');
            break;
        case 'input_audio_buffer.speech_stopped':
            console.log('Speech stopped');
            break;
        case 'conversation.item.input_audio_transcription.completed':
            // Whisper transcription of the user's speech for this turn.
            if (event.transcript) {
                currentTranscription += event.transcript;
                console.log('Transcription:', event.transcript);
            }
            break;
        case 'response.audio_transcript.delta':
            // Streaming transcript of the model's spoken answer.
            if (event.delta) {
                // An empty buffer means this delta opens a new response bubble.
                const isNewResponse = messageBuffer === '';
                messageBuffer += event.delta;
                sendToRenderer(isNewResponse ? 'new-response' : 'update-response', messageBuffer);
            }
            break;
        case 'response.audio_transcript.done':
            console.log('Audio transcript complete');
            break;
        case 'response.text.delta':
            // Text-modality output, accumulated identically to the transcript.
            if (event.delta) {
                const isNewResponse = messageBuffer === '';
                messageBuffer += event.delta;
                sendToRenderer(isNewResponse ? 'new-response' : 'update-response', messageBuffer);
            }
            break;
        case 'response.done':
            if (messageBuffer.trim() !== '') {
                sendToRenderer('update-response', messageBuffer);
                // Send conversation turn to be saved
                if (currentTranscription) {
                    sendToRenderer('save-conversation-turn-data', {
                        transcription: currentTranscription,
                        response: messageBuffer,
                    });
                    currentTranscription = '';
                }
            }
            messageBuffer = '';
            sendToRenderer('update-status', 'Listening...');
            break;
        case 'error':
            console.error('OpenAI error:', event.error);
            sendToRenderer('update-status', 'Error: ' + event.error.message);
            break;
        default:
            // console.log('Unhandled event type:', event.type);
            break;
    }
}
/**
 * Try to re-establish the Realtime session after an unexpected close.
 *
 * Recurses until either a connection succeeds or MAX_RECONNECT_ATTEMPTS is
 * exhausted, waiting RECONNECT_DELAY ms before each try. Partial
 * response/transcription buffers are cleared so a resumed session starts a
 * clean turn.
 * @param {Array} conversationHistory - Turns replayed into the new session.
 * @returns {Promise<boolean>} true when reconnected, false when given up.
 */
async function attemptReconnect(conversationHistory) {
    reconnectAttempts++;
    console.log(`Reconnection attempt ${reconnectAttempts}/${MAX_RECONNECT_ATTEMPTS}`);
    // Drop any half-received response from the dead connection.
    messageBuffer = '';
    currentTranscription = '';
    sendToRenderer('update-status', `Reconnecting... (${reconnectAttempts}/${MAX_RECONNECT_ATTEMPTS})`);
    await new Promise(resolve => setTimeout(resolve, RECONNECT_DELAY));
    try {
        const newConfig = { ...sessionParams, isReconnect: true };
        ws = await initializeOpenAISession(newConfig, conversationHistory);
        sendToRenderer('update-status', 'Reconnected! Listening...');
        console.log('OpenAI session reconnected successfully');
        return true;
    } catch (error) {
        console.error(`Reconnection attempt ${reconnectAttempts} failed:`, error);
        if (reconnectAttempts < MAX_RECONNECT_ATTEMPTS) {
            return attemptReconnect(conversationHistory);
        }
        console.log('Max reconnection attempts reached');
        sendToRenderer('reconnect-failed', {
            message: 'Tried 3 times to reconnect to OpenAI. Check your connection and API key.',
        });
        sessionParams = null;
        return false;
    }
}
/**
 * Append one base64-encoded pcm16 chunk to the Realtime input audio buffer.
 * @param {string} base64Data - Base64-encoded audio chunk.
 * @returns {Promise<{success: boolean, error?: string}>}
 */
async function sendAudioToOpenAI(base64Data) {
    if (!ws || ws.readyState !== WebSocket.OPEN) {
        console.error('WebSocket not connected');
        return { success: false, error: 'No active connection' };
    }
    try {
        const appendEvent = {
            type: 'input_audio_buffer.append',
            audio: base64Data,
        };
        ws.send(JSON.stringify(appendEvent));
        return { success: true };
    } catch (error) {
        console.error('Error sending audio to OpenAI:', error);
        return { success: false, error: error.message };
    }
}
/**
 * Send a user text message over the Realtime connection and request a model
 * response for it.
 * @param {string} text - The user's message.
 * @returns {Promise<{success: boolean, error?: string}>}
 */
async function sendTextToOpenAI(text) {
    if (!ws || ws.readyState !== WebSocket.OPEN) {
        console.error('WebSocket not connected');
        return { success: false, error: 'No active connection' };
    }
    try {
        // Create a conversation item carrying the user's text...
        const userItem = {
            type: 'conversation.item.create',
            item: {
                type: 'message',
                role: 'user',
                content: [{ type: 'input_text', text: text }],
            },
        };
        ws.send(JSON.stringify(userItem));
        // ...then ask the model to respond to it.
        ws.send(JSON.stringify({ type: 'response.create' }));
        return { success: true };
    } catch (error) {
        console.error('Error sending text to OpenAI:', error);
        return { success: false, error: error.message };
    }
}
/**
 * Analyze a screenshot with a vision-capable chat model.
 *
 * The Realtime API does not accept images, so this falls back to the regular
 * Chat Completions endpoint with `stream: true` and forwards partial text to
 * the renderer as it arrives.
 *
 * Fix: the previous SSE parser decoded each network chunk independently, so a
 * `data:` line (or a multi-byte UTF-8 sequence) split across two chunks was
 * corrupted and silently dropped. Decoding now uses {stream: true} and keeps
 * the trailing partial line in a carry buffer until it completes.
 *
 * @param {string} base64Data - Base64-encoded JPEG image.
 * @param {string} prompt - What the model should do with the image.
 * @param {{apiKey: string, baseUrl?: string, model?: string}} config
 * @returns {Promise<{success: boolean, text?: string, model?: string, error?: string}>}
 */
async function sendImageToOpenAI(base64Data, prompt, config) {
    const { apiKey, baseUrl, model } = config;
    // Derive the HTTPS REST endpoint from the (possibly WebSocket) base URL.
    const apiEndpoint = baseUrl
        ? `${baseUrl.replace('wss://', 'https://').replace('/v1/realtime', '')}/v1/chat/completions`
        : 'https://api.openai.com/v1/chat/completions';
    try {
        const response = await fetch(apiEndpoint, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                Authorization: `Bearer ${apiKey}`,
            },
            body: JSON.stringify({
                model: model || 'gpt-4o',
                messages: [
                    {
                        role: 'user',
                        content: [
                            { type: 'text', text: prompt },
                            {
                                type: 'image_url',
                                image_url: {
                                    url: `data:image/jpeg;base64,${base64Data}`,
                                },
                            },
                        ],
                    },
                ],
                max_tokens: 4096,
                stream: true,
            }),
        });
        if (!response.ok) {
            const error = await response.text();
            throw new Error(`OpenAI API error: ${response.status} - ${error}`);
        }
        const reader = response.body.getReader();
        const decoder = new TextDecoder();
        let fullText = '';
        let isFirst = true;
        // Carry-over for an SSE line split across network chunks.
        let pending = '';
        while (true) {
            const { done, value } = await reader.read();
            if (done) break;
            // {stream: true} keeps multi-byte UTF-8 sequences intact across reads.
            pending += decoder.decode(value, { stream: true });
            const lines = pending.split('\n');
            // The last element may be an incomplete line — keep it for later.
            pending = lines.pop() ?? '';
            for (const rawLine of lines) {
                const line = rawLine.trim();
                if (!line.startsWith('data: ')) continue;
                const data = line.slice('data: '.length);
                if (data === '[DONE]') continue;
                try {
                    const json = JSON.parse(data);
                    const content = json.choices[0]?.delta?.content;
                    if (content) {
                        fullText += content;
                        sendToRenderer(isFirst ? 'new-response' : 'update-response', fullText);
                        isFirst = false;
                    }
                } catch (e) {
                    // Skip invalid JSON
                }
            }
        }
        return { success: true, text: fullText, model: model || 'gpt-4o' };
    } catch (error) {
        console.error('Error sending image to OpenAI:', error);
        return { success: false, error: error.message };
    }
}
/**
 * Close the Realtime session at the user's request. Sets isUserClosing so
 * the WebSocket 'close' handler does not try to reconnect.
 */
function closeOpenAISession() {
    isUserClosing = true;
    sessionParams = null;
    if (!ws) return;
    ws.close();
    ws = null;
}
// Public API of the OpenAI Realtime provider.
module.exports = {
    initializeOpenAISession,
    sendAudioToOpenAI,
    sendTextToOpenAI,
    sendImageToOpenAI,
    closeOpenAISession,
};

View File

@ -1,820 +0,0 @@
const { BrowserWindow } = require('electron');
const fs = require('fs');
const path = require('path');
const os = require('os');
const { spawn } = require('child_process');
// OpenAI SDK will be loaded dynamically
let OpenAI = null;
// OpenAI SDK-based provider (for BotHub, Azure, and other OpenAI-compatible APIs)
// This uses the standard Chat Completions API with Whisper for transcription
let openaiClient = null; // SDK client, created by initializeOpenAISDK()
let currentConfig = null; // Config passed to initializeOpenAISDK (model, keys, ...)
let conversationMessages = []; // Chat history in Chat Completions message format
let isProcessing = false; // Guards against overlapping completion requests
let audioInputMode = 'auto'; // 'auto' (VAD-driven) or 'push-to-talk'
let isPushToTalkActive = false; // True while push-to-talk recording is on
// macOS audio capture
let systemAudioProc = null; // Spawned SystemAudioDump child process
let audioBuffer = Buffer.alloc(0); // PCM accumulated from SystemAudioDump
let transcriptionTimer = null;
const TRANSCRIPTION_INTERVAL_MS = 3000; // Transcribe every 3 seconds
const MIN_AUDIO_DURATION_MS = 500; // Minimum audio duration to transcribe
const SAMPLE_RATE = 24000;
/**
 * Forward a message to the renderer process of the first open window.
 * @param {string} channel - IPC channel name.
 * @param {*} data - Payload for the renderer.
 */
function sendToRenderer(channel, data) {
    const allWindows = BrowserWindow.getAllWindows();
    if (allWindows.length === 0) return;
    allWindows[0].webContents.send(channel, data);
}
/**
 * Create the OpenAI SDK client used for chat completions and Whisper
 * transcription. Supports OpenAI-compatible providers via a custom base URL.
 * @param {{apiKey: string, baseUrl?: string, model?: string}} config
 * @returns {Promise<boolean>} true on success.
 * @throws {Error} When no API key is provided.
 */
async function initializeOpenAISDK(config) {
    const { apiKey, baseUrl } = config;
    if (!apiKey) {
        throw new Error('OpenAI API key is required');
    }
    // The openai package is ESM-only, so load it lazily via dynamic import.
    if (!OpenAI) {
        ({ default: OpenAI } = await import('openai'));
    }
    const clientConfig = { apiKey };
    // Use custom baseURL only when it is a non-blank string.
    if (baseUrl && baseUrl.trim() !== '') {
        clientConfig.baseURL = baseUrl;
    }
    openaiClient = new OpenAI(clientConfig);
    currentConfig = config;
    conversationMessages = [];
    console.log('OpenAI SDK initialized with baseURL:', clientConfig.baseURL || 'default');
    sendToRenderer('update-status', 'Ready (OpenAI SDK)');
    return true;
}
/**
 * Reset the conversation history, seeding it with the given system prompt
 * when one is provided.
 * @param {string|null|undefined} systemPrompt
 */
function setSystemPrompt(systemPrompt) {
    conversationMessages = systemPrompt
        ? [{ role: 'system', content: systemPrompt }]
        : [];
}
// Create WAV file from raw PCM data
/**
 * Wrap raw PCM samples in a canonical 44-byte WAV (RIFF) header.
 * @param {Buffer} pcmBuffer - Raw little-endian PCM sample data.
 * @param {number} [sampleRate=24000]
 * @param {number} [numChannels=1]
 * @param {number} [bitsPerSample=16]
 * @returns {Buffer} Complete WAV file image (header + samples).
 */
function createWavBuffer(pcmBuffer, sampleRate = 24000, numChannels = 1, bitsPerSample = 16) {
    const HEADER_SIZE = 44;
    const bytesPerSample = bitsPerSample / 8;
    const blockAlign = numChannels * bytesPerSample;
    const byteRate = sampleRate * blockAlign;
    const dataSize = pcmBuffer.length;
    const wav = Buffer.alloc(HEADER_SIZE + dataSize);
    // RIFF chunk descriptor; the size field excludes the first 8 bytes.
    wav.write('RIFF', 0);
    wav.writeUInt32LE(HEADER_SIZE + dataSize - 8, 4);
    wav.write('WAVE', 8);
    // "fmt " sub-chunk: PCM format description.
    wav.write('fmt ', 12);
    wav.writeUInt32LE(16, 16); // fmt chunk size (16 for PCM)
    wav.writeUInt16LE(1, 20); // audio format 1 = uncompressed PCM
    wav.writeUInt16LE(numChannels, 22);
    wav.writeUInt32LE(sampleRate, 24);
    wav.writeUInt32LE(byteRate, 28);
    wav.writeUInt16LE(blockAlign, 32);
    wav.writeUInt16LE(bitsPerSample, 34);
    // "data" sub-chunk followed by the raw samples.
    wav.write('data', 36);
    wav.writeUInt32LE(dataSize, 40);
    pcmBuffer.copy(wav, HEADER_SIZE);
    return wav;
}
/**
 * Transcribe PCM audio through the provider's Whisper endpoint.
 * Stages the audio as a temporary WAV file because the SDK expects a file
 * stream rather than a raw buffer.
 * @param {Buffer|string} audioBuffer - Raw PCM data, or a base64 string of it.
 * @param {string} [mimeType='audio/wav'] - Accepted for API compatibility; not used.
 * @returns {Promise<string>} The transcription text.
 * @throws When the client is not initialized or the API call fails.
 */
async function transcribeAudio(audioBuffer, mimeType = 'audio/wav') {
    if (!openaiClient) {
        throw new Error('OpenAI client not initialized');
    }
    try {
        // Accept base64 input as well as raw buffers.
        const pcm = typeof audioBuffer === 'string'
            ? Buffer.from(audioBuffer, 'base64')
            : audioBuffer;
        const tempFile = path.join(os.tmpdir(), `audio_${Date.now()}.wav`);
        // Write a proper WAV (header + samples) for the upload.
        fs.writeFileSync(tempFile, createWavBuffer(pcm, SAMPLE_RATE, 1, 16));
        const transcription = await openaiClient.audio.transcriptions.create({
            file: fs.createReadStream(tempFile),
            model: currentConfig.whisperModel || 'whisper-1',
            response_format: 'text',
        });
        try {
            fs.unlinkSync(tempFile);
        } catch (e) {
            // Best-effort cleanup; a leftover temp file is harmless.
        }
        return transcription;
    } catch (error) {
        console.error('Transcription error:', error);
        throw error;
    }
}
/**
 * Append a user message to the conversation, stream the chat completion, and
 * forward partial text to the renderer via 'new-response'/'update-response'.
 *
 * Fix: the isProcessing lock is now released in a finally block, so an
 * unexpected throw (e.g. from an IPC send) can no longer leave the provider
 * permanently stuck in the "Already processing" state.
 *
 * @param {string} text - The user's message.
 * @returns {Promise<{success: boolean, text?: string, error?: string}>}
 */
async function sendTextMessage(text) {
    if (!openaiClient) {
        return { success: false, error: 'OpenAI client not initialized' };
    }
    if (isProcessing) {
        return { success: false, error: 'Already processing a request' };
    }
    isProcessing = true;
    try {
        // Add user message to conversation
        conversationMessages.push({
            role: 'user',
            content: text,
        });
        sendToRenderer('update-status', 'Thinking...');
        const stream = await openaiClient.chat.completions.create({
            model: currentConfig.model || 'gpt-4o',
            messages: conversationMessages,
            stream: true,
            max_tokens: 4096,
        });
        let fullResponse = '';
        let isFirst = true;
        for await (const chunk of stream) {
            const content = chunk.choices[0]?.delta?.content;
            if (content) {
                fullResponse += content;
                // First token opens a new response bubble; the rest update it.
                sendToRenderer(isFirst ? 'new-response' : 'update-response', fullResponse);
                isFirst = false;
            }
        }
        // Add assistant response to conversation
        conversationMessages.push({
            role: 'assistant',
            content: fullResponse,
        });
        sendToRenderer('update-status', 'Ready');
        return { success: true, text: fullResponse };
    } catch (error) {
        console.error('Chat completion error:', error);
        sendToRenderer('update-status', 'Error: ' + error.message);
        return { success: false, error: error.message };
    } finally {
        // Always release the lock, whatever path we exited through.
        isProcessing = false;
    }
}
/**
 * Analyze a screenshot with the configured vision model, streaming the answer
 * to the renderer. The image itself is sent only once; conversation history
 * keeps just the text prompt so follow-ups stay small.
 *
 * Fix: the isProcessing lock is now released in a finally block, so an
 * unexpected throw cannot leave the provider stuck in "Already processing".
 *
 * @param {string} base64Image - Base64-encoded JPEG.
 * @param {string} prompt - Instruction for the vision model.
 * @returns {Promise<{success: boolean, text?: string, model?: string, error?: string}>}
 */
async function sendImageMessage(base64Image, prompt) {
    if (!openaiClient) {
        return { success: false, error: 'OpenAI client not initialized' };
    }
    if (isProcessing) {
        return { success: false, error: 'Already processing a request' };
    }
    isProcessing = true;
    try {
        sendToRenderer('update-status', 'Analyzing image...');
        const messages = [
            ...conversationMessages,
            {
                role: 'user',
                content: [
                    { type: 'text', text: prompt },
                    {
                        type: 'image_url',
                        image_url: {
                            url: `data:image/jpeg;base64,${base64Image}`,
                        },
                    },
                ],
            },
        ];
        const stream = await openaiClient.chat.completions.create({
            model: currentConfig.visionModel || currentConfig.model || 'gpt-4o',
            messages: messages,
            stream: true,
            max_tokens: 4096,
        });
        let fullResponse = '';
        let isFirst = true;
        for await (const chunk of stream) {
            const content = chunk.choices[0]?.delta?.content;
            if (content) {
                fullResponse += content;
                sendToRenderer(isFirst ? 'new-response' : 'update-response', fullResponse);
                isFirst = false;
            }
        }
        // Add to conversation history (text only for follow-ups)
        conversationMessages.push({
            role: 'user',
            content: prompt,
        });
        conversationMessages.push({
            role: 'assistant',
            content: fullResponse,
        });
        sendToRenderer('update-status', 'Ready');
        return { success: true, text: fullResponse, model: currentConfig.visionModel || currentConfig.model };
    } catch (error) {
        console.error('Vision error:', error);
        sendToRenderer('update-status', 'Error: ' + error.message);
        return { success: false, error: error.message };
    } finally {
        // Always release the lock, whatever path we exited through.
        isProcessing = false;
    }
}
// Process audio chunk and get response
// This accumulates audio and transcribes when silence is detected or timer expires
let audioChunks = []; // Raw PCM chunks buffered since the last flush
let lastAudioTime = 0; // Timestamp (ms) of the most recent chunk
let firstChunkTime = 0; // Timestamp (ms) of the first chunk in the buffer
const SILENCE_THRESHOLD_MS = 1500; // 1.5 seconds of silence
const MAX_BUFFER_DURATION_MS = 5000; // 5 seconds max buffering before forced transcription
let silenceCheckTimer = null; // One-shot timer that flushes on silence
let windowsTranscriptionTimer = null; // Periodic forced-flush timer (Windows only)
/**
 * Buffer one base64 PCM chunk from the renderer and schedule transcription.
 *
 * Push-to-talk mode: chunks are buffered only while PTT is active and are
 * flushed by setPushToTalkActive(false). Auto mode: a silence timer flushes
 * after SILENCE_THRESHOLD_MS without new audio; on Windows an additional
 * periodic timer forces a flush once MAX_BUFFER_DURATION_MS has accumulated.
 * @param {string} base64Audio - Base64-encoded PCM audio chunk.
 * @param {string} mimeType - Supplied by callers; not used here.
 * @returns {Promise<{success: boolean, buffering?: boolean, ignored?: boolean, error?: string}>}
 */
async function processAudioChunk(base64Audio, mimeType) {
    if (!openaiClient) {
        return { success: false, error: 'OpenAI client not initialized' };
    }
    const now = Date.now();
    const buffer = Buffer.from(base64Audio, 'base64');
    if (audioInputMode === 'push-to-talk') {
        if (!isPushToTalkActive) {
            // PTT not engaged: discard the chunk entirely.
            return { success: true, ignored: true };
        }
        // In push-to-talk mode we only buffer while active
        audioChunks.push(buffer);
        lastAudioTime = now;
        return { success: true, buffering: true };
    }
    // Track first chunk time for duration-based flushing
    if (audioChunks.length === 0) {
        firstChunkTime = now;
        // Start periodic transcription timer (Windows needs this)
        if (!windowsTranscriptionTimer && process.platform === 'win32') {
            console.log('Starting Windows periodic transcription timer...');
            windowsTranscriptionTimer = setInterval(async () => {
                if (audioChunks.length > 0) {
                    const bufferDuration = Date.now() - firstChunkTime;
                    if (bufferDuration >= MAX_BUFFER_DURATION_MS) {
                        console.log(`Periodic flush: ${bufferDuration}ms of audio buffered`);
                        await flushAudioAndTranscribe();
                    }
                }
            }, 2000); // Check every 2 seconds
        }
    }
    // Add to audio buffer
    audioChunks.push(buffer);
    lastAudioTime = now;
    // Clear existing timer
    if (silenceCheckTimer) {
        clearTimeout(silenceCheckTimer);
    }
    // Set timer to check for silence
    silenceCheckTimer = setTimeout(async () => {
        const silenceDuration = Date.now() - lastAudioTime;
        if (silenceDuration >= SILENCE_THRESHOLD_MS && audioChunks.length > 0) {
            console.log('Silence detected, flushing audio for transcription...');
            await flushAudioAndTranscribe();
        }
    }, SILENCE_THRESHOLD_MS);
    return { success: true, buffering: true };
}
/**
 * Concatenate all buffered chunks, transcribe them with Whisper, and feed a
 * non-empty transcription into the chat as a user message.
 * The chunk buffer is emptied and the Windows periodic timer cancelled before
 * the async work starts, so a concurrent flush cannot pick up the same audio.
 * @returns {Promise<{success: boolean, text?: string, transcription?: string, response?: string, error?: string}>}
 */
async function flushAudioAndTranscribe() {
    if (audioChunks.length === 0) {
        return { success: true, text: '' };
    }
    // Clear Windows transcription timer
    if (windowsTranscriptionTimer) {
        clearInterval(windowsTranscriptionTimer);
        windowsTranscriptionTimer = null;
    }
    try {
        // Combine all audio chunks
        const combinedBuffer = Buffer.concat(audioChunks);
        const chunkCount = audioChunks.length;
        audioChunks = [];
        firstChunkTime = 0;
        // Calculate audio duration
        const bytesPerSample = 2;
        const audioDurationMs = (combinedBuffer.length / bytesPerSample / SAMPLE_RATE) * 1000;
        console.log(`Transcribing ${chunkCount} chunks (${audioDurationMs.toFixed(0)}ms of audio)...`);
        // Transcribe
        const transcription = await transcribeAudio(combinedBuffer);
        if (transcription && transcription.trim()) {
            console.log('Transcription result:', transcription);
            // Send to chat
            const response = await sendTextMessage(transcription);
            return {
                success: true,
                transcription: transcription,
                response: response.text,
            };
        }
        return { success: true, text: '' };
    } catch (error) {
        console.error('Flush audio error:', error);
        return { success: false, error: error.message };
    }
}
/** Inform the renderer of the current push-to-talk state and input mode. */
function notifyPushToTalkState() {
    const state = {
        active: isPushToTalkActive,
        inputMode: audioInputMode,
    };
    sendToRenderer('push-to-talk-state', state);
}
/** Drop all buffered realtime audio and cancel any pending flush timers. */
function resetRealtimeAudioBuffer() {
    audioChunks = [];
    firstChunkTime = 0;
    lastAudioTime = 0;
    if (silenceCheckTimer !== null) {
        clearTimeout(silenceCheckTimer);
        silenceCheckTimer = null;
    }
    if (windowsTranscriptionTimer !== null) {
        clearInterval(windowsTranscriptionTimer);
        windowsTranscriptionTimer = null;
    }
}
/**
 * Keep the periodic transcription timer consistent with the input mode:
 * stopped in push-to-talk mode, running while system audio capture is active.
 */
function updateTranscriptionTimerForPushToTalk() {
    if (audioInputMode === 'push-to-talk') {
        stopTranscriptionTimer();
    } else if (systemAudioProc && !transcriptionTimer) {
        startTranscriptionTimer();
    }
}
/**
 * Enter or leave push-to-talk recording.
 *
 * Activating clears any stale audio buffers and shows "Recording...". When
 * recording stops while in push-to-talk mode, whatever audio was buffered
 * (browser chunks on Windows, SystemAudioDump PCM on macOS) is transcribed
 * immediately.
 * @param {boolean} active - Desired recording state.
 */
async function setPushToTalkActive(active) {
    const wasActive = isPushToTalkActive;
    isPushToTalkActive = active;
    if (active) {
        // Starting recording - clear any old buffers
        resetRealtimeAudioBuffer();
        audioBuffer = Buffer.alloc(0);
        console.log('Push-to-Talk: Recording started');
        sendToRenderer('update-status', 'Recording...');
    }
    notifyPushToTalkState();
    // When user stops recording in PTT mode, send audio for transcription
    if (!active && wasActive && audioInputMode === 'push-to-talk') {
        console.log('Push-to-Talk: Recording stopped, transcribing...');
        sendToRenderer('update-status', 'Transcribing...');
        // For browser-based audio (Windows)
        if (audioChunks.length > 0) {
            await flushAudioAndTranscribe();
        }
        // For macOS SystemAudioDump
        if (audioBuffer.length > 0) {
            await transcribeBufferedAudio(true); // Force transcription
        }
        sendToRenderer('update-status', 'Listening...');
    }
}
/** Flip the push-to-talk recording state (on -> off, off -> on). */
async function togglePushToTalk() {
    await setPushToTalkActive(!isPushToTalkActive);
}
/**
 * Apply a new audio input mode. Leaving push-to-talk mode deactivates any
 * in-progress recording and clears all buffered audio.
 * @param {string} [inputMode] - 'auto' or 'push-to-talk'; falsy keeps the
 *   current mode.
 */
function updatePushToTalkSettings(inputMode) {
    if (inputMode) {
        audioInputMode = inputMode;
    }
    const inPushToTalk = audioInputMode === 'push-to-talk';
    if (!inPushToTalk) {
        if (isPushToTalkActive) {
            isPushToTalkActive = false;
        }
        resetRealtimeAudioBuffer();
        audioBuffer = Buffer.alloc(0);
    }
    notifyPushToTalkState();
    updateTranscriptionTimerForPushToTalk();
}
/**
 * Forget the conversation history (keeping the system prompt, if present)
 * and discard any audio waiting to be transcribed.
 */
function clearConversation() {
    // Preserve only the system prompt, when one was set.
    const systemMessage = conversationMessages.find((m) => m.role === 'system');
    conversationMessages = systemMessage ? [systemMessage] : [];
    audioChunks = [];
    // Cancel both flush timers so stale callbacks cannot fire.
    if (silenceCheckTimer !== null) {
        clearTimeout(silenceCheckTimer);
        silenceCheckTimer = null;
    }
    if (windowsTranscriptionTimer !== null) {
        clearInterval(windowsTranscriptionTimer);
        windowsTranscriptionTimer = null;
    }
}
/**
 * Tear down the SDK provider: stop macOS audio capture, drop the client and
 * all session state, cancel timers, and notify the renderer.
 */
function closeOpenAISDK() {
    stopMacOSAudioCapture();
    // Release client and per-session state.
    openaiClient = null;
    currentConfig = null;
    conversationMessages = [];
    audioChunks = [];
    isProcessing = false;
    isPushToTalkActive = false;
    // Cancel the flush timers so nothing fires after shutdown.
    if (silenceCheckTimer !== null) {
        clearTimeout(silenceCheckTimer);
        silenceCheckTimer = null;
    }
    if (windowsTranscriptionTimer !== null) {
        clearInterval(windowsTranscriptionTimer);
        windowsTranscriptionTimer = null;
    }
    notifyPushToTalkState();
    sendToRenderer('update-status', 'Disconnected');
}
// ============ macOS Audio Capture ============
/**
 * Best-effort kill of any stray SystemAudioDump processes left over from a
 * previous run. Errors (e.g. no matching process) are ignored; resolves
 * after a short grace period so the OS can reap the process.
 * @returns {Promise<void>}
 */
async function killExistingSystemAudioDump() {
    return new Promise(resolve => {
        const { exec } = require('child_process');
        exec('pkill -f SystemAudioDump', () => {
            // Resolve regardless of the exec outcome, after a 100ms grace period.
            setTimeout(resolve, 100);
        });
    });
}
/**
 * Down-mix 16-bit stereo PCM to mono by keeping only the left channel.
 *
 * Fix: a buffer whose length is not a multiple of 4 previously produced a
 * fractional frame count, yielding an odd-sized allocation and an
 * out-of-range write; the frame count is now floored so trailing partial
 * frames are safely dropped.
 *
 * @param {Buffer} stereoBuffer - Interleaved L/R int16 samples (4 bytes per frame).
 * @returns {Buffer} Mono int16 samples (2 bytes per frame).
 */
function convertStereoToMono(stereoBuffer) {
    const frameCount = Math.floor(stereoBuffer.length / 4);
    const monoBuffer = Buffer.alloc(frameCount * 2);
    for (let frame = 0; frame < frameCount; frame++) {
        // Left channel is the first int16 of each 4-byte frame.
        const leftSample = stereoBuffer.readInt16LE(frame * 4);
        monoBuffer.writeInt16LE(leftSample, frame * 2);
    }
    return monoBuffer;
}
// Calculate RMS (Root Mean Square) volume level of audio buffer
/**
 * Root-mean-square amplitude of a buffer of little-endian int16 samples.
 * @param {Buffer} buffer - Raw 16-bit PCM data.
 * @returns {number} RMS level; 0 for an empty buffer.
 */
function calculateRMS(buffer) {
    const sampleCount = buffer.length / 2;
    if (sampleCount === 0) return 0;
    let energy = 0;
    for (let i = 0; i < sampleCount; i++) {
        const sample = buffer.readInt16LE(i * 2);
        energy += sample * sample;
    }
    return Math.sqrt(energy / sampleCount);
}
// Check if audio contains speech (simple VAD based on volume threshold)
/**
 * Crude voice-activity detection: the buffer counts as speech when its RMS
 * level exceeds the threshold.
 * @param {Buffer} buffer - Raw 16-bit PCM data.
 * @param {number} [threshold=500] - RMS level above which audio is "speech".
 * @returns {boolean}
 */
function hasSpeech(buffer, threshold = 500) {
    return calculateRMS(buffer) > threshold;
}
/**
 * Transcribe the macOS system-audio buffer and feed the result to the chat.
 *
 * Returns early (without clearing state) when a request is already in flight
 * or the buffer is too short; in auto mode the RMS voice check must also
 * pass. Push-to-talk callers pass forcePTT=true to bypass the VAD check and
 * status updates, since the user explicitly asked for transcription.
 * @param {boolean} [forcePTT=false] - Force transcription for push-to-talk.
 */
async function transcribeBufferedAudio(forcePTT = false) {
    if (audioBuffer.length === 0 || isProcessing) {
        return;
    }
    // In push-to-talk mode, only transcribe when explicitly requested (forcePTT=true)
    if (audioInputMode === 'push-to-talk' && !forcePTT) {
        return;
    }
    // Calculate audio duration
    const bytesPerSample = 2;
    const audioDurationMs = (audioBuffer.length / bytesPerSample / SAMPLE_RATE) * 1000;
    if (audioDurationMs < MIN_AUDIO_DURATION_MS) {
        return; // Not enough audio
    }
    // Check if there's actual speech in the audio (Voice Activity Detection)
    // Skip VAD check in PTT mode - user explicitly wants to transcribe
    if (!forcePTT && !hasSpeech(audioBuffer)) {
        // Clear buffer if it's just silence/noise
        audioBuffer = Buffer.alloc(0);
        return;
    }
    // Take current buffer and reset
    const currentBuffer = audioBuffer;
    audioBuffer = Buffer.alloc(0);
    try {
        console.log(`Transcribing ${audioDurationMs.toFixed(0)}ms of audio...`);
        if (!forcePTT) {
            sendToRenderer('update-status', 'Transcribing...');
        }
        const transcription = await transcribeAudio(currentBuffer, 'audio/wav');
        // Ignore empty or trivially short transcriptions (noise artifacts).
        if (transcription && transcription.trim() && transcription.trim().length > 2) {
            console.log('Transcription:', transcription);
            sendToRenderer('update-status', 'Processing...');
            // Send to chat
            await sendTextMessage(transcription);
        } else if (forcePTT) {
            console.log('Push-to-Talk: No speech detected in recording');
        }
        if (!forcePTT) {
            sendToRenderer('update-status', 'Listening...');
        }
    } catch (error) {
        console.error('Transcription error:', error);
        if (!forcePTT) {
            sendToRenderer('update-status', 'Listening...');
        }
    }
}
/**
 * Starts macOS system-audio capture by spawning the bundled SystemAudioDump
 * helper binary and streaming its PCM output into the module-level
 * `audioBuffer` for periodic transcription.
 *
 * Returns false on non-macOS platforms, when the binary is missing or cannot
 * be made executable, or when the process fails to spawn; returns true once
 * the capture process is running.
 *
 * @returns {Promise<boolean>} whether capture started successfully.
 */
async function startMacOSAudioCapture() {
    if (process.platform !== 'darwin') return false;
    // Kill any existing SystemAudioDump processes first
    await killExistingSystemAudioDump();
    console.log('=== Starting macOS audio capture (OpenAI SDK) ===');
    sendToRenderer('update-status', 'Starting audio capture...');
    const { app } = require('electron');
    const fs = require('fs');
    // Packaged builds ship the binary in resources/; dev builds read ../assets.
    let systemAudioPath;
    if (app.isPackaged) {
        systemAudioPath = path.join(process.resourcesPath, 'SystemAudioDump');
    } else {
        systemAudioPath = path.join(__dirname, '../assets', 'SystemAudioDump');
    }
    console.log('SystemAudioDump config:', {
        path: systemAudioPath,
        isPackaged: app.isPackaged,
        resourcesPath: process.resourcesPath,
        exists: fs.existsSync(systemAudioPath),
    });
    // Check if file exists
    if (!fs.existsSync(systemAudioPath)) {
        console.error('FATAL: SystemAudioDump not found at:', systemAudioPath);
        sendToRenderer('update-status', 'Error: Audio binary not found');
        return false;
    }
    // Check and fix executable permissions
    try {
        fs.accessSync(systemAudioPath, fs.constants.X_OK);
        console.log('SystemAudioDump is executable');
    } catch (err) {
        console.warn('SystemAudioDump not executable, fixing permissions...');
        try {
            fs.chmodSync(systemAudioPath, 0o755);
            console.log('Fixed executable permissions');
        } catch (chmodErr) {
            console.error('Failed to fix permissions:', chmodErr);
            sendToRenderer('update-status', 'Error: Cannot execute audio binary');
            return false;
        }
    }
    const spawnOptions = {
        stdio: ['ignore', 'pipe', 'pipe'],
        env: {
            ...process.env,
        },
    };
    console.log('Spawning SystemAudioDump...');
    systemAudioProc = spawn(systemAudioPath, [], spawnOptions);
    if (!systemAudioProc.pid) {
        console.error('FATAL: Failed to start SystemAudioDump - no PID');
        sendToRenderer('update-status', 'Error: Audio capture failed to start');
        return false;
    }
    console.log('SystemAudioDump started with PID:', systemAudioProc.pid);
    // Stream layout assumed here: 2-byte samples, 2 channels at SAMPLE_RATE,
    // consumed in 0.1 s chunks — TODO confirm against SystemAudioDump output.
    const CHUNK_DURATION = 0.1;
    const BYTES_PER_SAMPLE = 2;
    const CHANNELS = 2;
    const CHUNK_SIZE = SAMPLE_RATE * BYTES_PER_SAMPLE * CHANNELS * CHUNK_DURATION;
    let tempBuffer = Buffer.alloc(0);
    let chunkCount = 0;
    let firstDataReceived = false;
    systemAudioProc.stdout.on('data', data => {
        if (!firstDataReceived) {
            firstDataReceived = true;
            console.log('First audio data received! Size:', data.length);
            sendToRenderer('update-status', 'Listening...');
        }
        // Accumulate raw bytes and peel off fixed-size chunks.
        tempBuffer = Buffer.concat([tempBuffer, data]);
        while (tempBuffer.length >= CHUNK_SIZE) {
            const chunk = tempBuffer.slice(0, CHUNK_SIZE);
            tempBuffer = tempBuffer.slice(CHUNK_SIZE);
            // Convert stereo to mono
            const monoChunk = CHANNELS === 2 ? convertStereoToMono(chunk) : chunk;
            // In push-to-talk mode, drop audio unless the PTT key is held.
            if (audioInputMode === 'push-to-talk' && !isPushToTalkActive) {
                continue;
            }
            // Add to audio buffer for transcription
            audioBuffer = Buffer.concat([audioBuffer, monoChunk]);
            chunkCount++;
            if (chunkCount % 100 === 0) {
                console.log(`Audio: ${chunkCount} chunks processed, buffer size: ${audioBuffer.length}`);
            }
        }
        // Limit buffer size (max 30 seconds of audio)
        const maxBufferSize = SAMPLE_RATE * BYTES_PER_SAMPLE * 30;
        if (audioBuffer.length > maxBufferSize) {
            // Keep only the most recent 30 s; older audio is discarded.
            audioBuffer = audioBuffer.slice(-maxBufferSize);
        }
    });
    systemAudioProc.stderr.on('data', data => {
        const msg = data.toString();
        console.error('SystemAudioDump stderr:', msg);
        // Surface obvious errors to the UI, truncated to keep the status short.
        if (msg.toLowerCase().includes('error')) {
            sendToRenderer('update-status', 'Audio error: ' + msg.substring(0, 50));
        }
    });
    systemAudioProc.on('close', (code, signal) => {
        console.log('SystemAudioDump closed:', { code, signal, chunksProcessed: chunkCount, tempBufferSize: tempBuffer.length });
        // code null means killed by signal; only report genuine failures.
        if (code !== 0 && code !== null) {
            sendToRenderer('update-status', `Audio stopped (exit: ${code}, signal: ${signal})`);
        }
        systemAudioProc = null;
        stopTranscriptionTimer();
    });
    systemAudioProc.on('error', err => {
        console.error('SystemAudioDump spawn error:', err.message, err.stack);
        sendToRenderer('update-status', 'Audio error: ' + err.message);
        systemAudioProc = null;
        stopTranscriptionTimer();
    });
    systemAudioProc.on('exit', (code, signal) => {
        console.log('SystemAudioDump exit event:', { code, signal });
    });
    // Start periodic transcription
    // (respects the current input mode — no auto timer in push-to-talk).
    updateTranscriptionTimerForPushToTalk();
    sendToRenderer('update-status', 'Listening...');
    return true;
}
/**
 * (Re)arms the periodic auto-transcription interval.
 * No-op in push-to-talk mode, where transcription is user-triggered.
 */
function startTranscriptionTimer() {
    // Don't start auto-transcription timer in push-to-talk mode
    if (audioInputMode === 'push-to-talk') return;
    // Clear any previous timer so at most one interval is ever live.
    stopTranscriptionTimer();
    transcriptionTimer = setInterval(transcribeBufferedAudio, TRANSCRIPTION_INTERVAL_MS);
}
/** Cancels the periodic transcription interval, if one is active. */
function stopTranscriptionTimer() {
    if (!transcriptionTimer) return;
    clearInterval(transcriptionTimer);
    transcriptionTimer = null;
}
/**
 * Stops system-audio capture: cancels the transcription timer, terminates
 * the SystemAudioDump child process (if running), and discards any
 * buffered audio.
 */
function stopMacOSAudioCapture() {
    stopTranscriptionTimer();
    const proc = systemAudioProc;
    if (proc) {
        console.log('Stopping SystemAudioDump for OpenAI SDK...');
        proc.kill('SIGTERM');
        systemAudioProc = null;
    }
    audioBuffer = Buffer.alloc(0);
}
// Public API of this module: SDK session lifecycle, system prompt and
// conversation management, text/image messaging, audio buffering and
// transcription, push-to-talk controls, and macOS system-audio capture.
module.exports = {
    initializeOpenAISDK,
    setSystemPrompt,
    transcribeAudio,
    sendTextMessage,
    sendImageMessage,
    processAudioChunk,
    flushAudioAndTranscribe,
    togglePushToTalk,
    updatePushToTalkSettings,
    clearConversation,
    closeOpenAISDK,
    startMacOSAudioCapture,
    stopMacOSAudioCapture,
};

View File

@ -1,21 +1,45 @@
const profilePrompts = { const responseModeFormats = {
interview: { brief: `**RESPONSE FORMAT REQUIREMENTS:**
intro: `You are an AI-powered interview assistant, designed to act as a discreet on-screen teleprompter. Your mission is to help the user excel in their job interview by providing concise, impactful, and ready-to-speak answers or key talking points. Analyze the ongoing interview dialogue and, crucially, the 'User-provided context' below.`,
formatRequirements: `**RESPONSE FORMAT REQUIREMENTS:**
- Keep responses SHORT and CONCISE (1-3 sentences max) - Keep responses SHORT and CONCISE (1-3 sentences max)
- Use **markdown formatting** for better readability - Use **markdown formatting** for better readability
- Use **bold** for key points and emphasis - Use **bold** for key points and emphasis
- Use bullet points (-) for lists when appropriate - Use bullet points (-) for lists when appropriate
- Focus on the most essential information only`, - Focus on the most essential information only
- EXCEPTION: If a coding/algorithm task is detected, ALWAYS provide the complete working code (see CODING TASKS below)`,
searchUsage: `**SEARCH TOOL USAGE:** detailed: `**RESPONSE FORMAT REQUIREMENTS:**
- Provide a THOROUGH and COMPREHENSIVE response with full explanations
- Use **markdown formatting** for better readability
- Use **bold** for key points and emphasis
- Use headers (##) to organize sections when appropriate
- Use bullet points (-) for lists when appropriate
- Include relevant context, edge cases, and reasoning
- For technical topics, explain the "why" behind each point
- No length restriction be as detailed as needed to fully answer the question`,
};
const codingAwareness = `**CODING TASKS — CRITICAL INSTRUCTION:**
When the interviewer/questioner asks to solve a coding problem, implement an algorithm, debug code, do a live coding exercise, open an IDE and write code, or any task that requires a code solution:
- You MUST provide the ACTUAL COMPLETE WORKING CODE SOLUTION
- NEVER respond with meta-advice like "now you should write code" or "prepare to implement" or "think about the approach"
- NEVER say "open your IDE" or "start coding" instead, GIVE THE CODE
- In brief mode: provide 2-3 bullet approach points, then the FULL working code with comments
- In detailed mode: explain approach, time/space complexity, edge cases, then the FULL working code with comments
- Include the programming language name in the code fence (e.g. \`\`\`python, \`\`\`javascript)
- If the language is not specified, default to Python
- The code must be complete, runnable, and correct`;
const profilePrompts = {
interview: {
intro: `You are an AI-powered interview assistant, designed to act as a discreet on-screen teleprompter. Your mission is to help the user excel in their job interview by providing concise, impactful, and ready-to-speak answers or key talking points. Analyze the ongoing interview dialogue and, crucially, the 'User-provided context' below.`,
searchUsage: `**SEARCH TOOL USAGE:**
- If the interviewer mentions **recent events, news, or current trends** (anything from the last 6 months), **ALWAYS use Google search** to get up-to-date information - If the interviewer mentions **recent events, news, or current trends** (anything from the last 6 months), **ALWAYS use Google search** to get up-to-date information
- If they ask about **company-specific information, recent acquisitions, funding, or leadership changes**, use Google search first - If they ask about **company-specific information, recent acquisitions, funding, or leadership changes**, use Google search first
- If they mention **new technologies, frameworks, or industry developments**, search for the latest information - If they mention **new technologies, frameworks, or industry developments**, search for the latest information
- After searching, provide a **concise, informed response** based on the real-time data`, - After searching, provide a **concise, informed response** based on the real-time data`,
content: `Focus on delivering the most essential information the user needs. Your suggestions should be direct and immediately usable. content: `Focus on delivering the most essential information the user needs. Your suggestions should be direct and immediately usable.
To help the user 'crack' the interview in their specific field: To help the user 'crack' the interview in their specific field:
1. Heavily rely on the 'User-provided context' (e.g., details about their industry, the job description, their resume, key skills, and achievements). 1. Heavily rely on the 'User-provided context' (e.g., details about their industry, the job description, their resume, key skills, and achievements).
@ -32,27 +56,20 @@ You: "I've been working with React for 4 years, building everything from simple
Interviewer: "Why do you want to work here?" Interviewer: "Why do you want to work here?"
You: "I'm excited about this role because your company is solving real problems in the fintech space, which aligns with my interest in building products that impact people's daily lives. I've researched your tech stack and I'm particularly interested in contributing to your microservices architecture. Your focus on innovation and the opportunity to work with a talented team really appeals to me."`, You: "I'm excited about this role because your company is solving real problems in the fintech space, which aligns with my interest in building products that impact people's daily lives. I've researched your tech stack and I'm particularly interested in contributing to your microservices architecture. Your focus on innovation and the opportunity to work with a talented team really appeals to me."`,
outputInstructions: `**OUTPUT INSTRUCTIONS:** outputInstructions: `**OUTPUT INSTRUCTIONS:**
Provide only the exact words to say in **markdown format**. No coaching, no "you should" statements, no explanations - just the direct response the candidate can speak immediately. Keep it **short and impactful**.`, Provide only the exact words to say in **markdown format**. No coaching, no "you should" statements, no explanations - just the direct response the candidate can speak immediately. Keep it **short and impactful**.`,
}, },
sales: { sales: {
intro: `You are a sales call assistant. Your job is to provide the exact words the salesperson should say to prospects during sales calls. Give direct, ready-to-speak responses that are persuasive and professional.`, intro: `You are a sales call assistant. Your job is to provide the exact words the salesperson should say to prospects during sales calls. Give direct, ready-to-speak responses that are persuasive and professional.`,
formatRequirements: `**RESPONSE FORMAT REQUIREMENTS:** searchUsage: `**SEARCH TOOL USAGE:**
- Keep responses SHORT and CONCISE (1-3 sentences max)
- Use **markdown formatting** for better readability
- Use **bold** for key points and emphasis
- Use bullet points (-) for lists when appropriate
- Focus on the most essential information only`,
searchUsage: `**SEARCH TOOL USAGE:**
- If the prospect mentions **recent industry trends, market changes, or current events**, **ALWAYS use Google search** to get up-to-date information - If the prospect mentions **recent industry trends, market changes, or current events**, **ALWAYS use Google search** to get up-to-date information
- If they reference **competitor information, recent funding news, or market data**, search for the latest information first - If they reference **competitor information, recent funding news, or market data**, search for the latest information first
- If they ask about **new regulations, industry reports, or recent developments**, use search to provide accurate data - If they ask about **new regulations, industry reports, or recent developments**, use search to provide accurate data
- After searching, provide a **concise, informed response** that demonstrates current market knowledge`, - After searching, provide a **concise, informed response** that demonstrates current market knowledge`,
content: `Examples: content: `Examples:
Prospect: "Tell me about your product" Prospect: "Tell me about your product"
You: "Our platform helps companies like yours reduce operational costs by 30% while improving efficiency. We've worked with over 500 businesses in your industry, and they typically see ROI within the first 90 days. What specific operational challenges are you facing right now?" You: "Our platform helps companies like yours reduce operational costs by 30% while improving efficiency. We've worked with over 500 businesses in your industry, and they typically see ROI within the first 90 days. What specific operational challenges are you facing right now?"
@ -63,27 +80,20 @@ You: "Three key differentiators set us apart: First, our implementation takes ju
Prospect: "I need to think about it" Prospect: "I need to think about it"
You: "I completely understand this is an important decision. What specific concerns can I address for you today? Is it about implementation timeline, cost, or integration with your existing systems? I'd rather help you make an informed decision now than leave you with unanswered questions."`, You: "I completely understand this is an important decision. What specific concerns can I address for you today? Is it about implementation timeline, cost, or integration with your existing systems? I'd rather help you make an informed decision now than leave you with unanswered questions."`,
outputInstructions: `**OUTPUT INSTRUCTIONS:** outputInstructions: `**OUTPUT INSTRUCTIONS:**
Provide only the exact words to say in **markdown format**. Be persuasive but not pushy. Focus on value and addressing objections directly. Keep responses **short and impactful**.`, Provide only the exact words to say in **markdown format**. Be persuasive but not pushy. Focus on value and addressing objections directly. Keep responses **short and impactful**.`,
}, },
meeting: { meeting: {
intro: `You are a meeting assistant. Your job is to provide the exact words to say during professional meetings, presentations, and discussions. Give direct, ready-to-speak responses that are clear and professional.`, intro: `You are a meeting assistant. Your job is to provide the exact words to say during professional meetings, presentations, and discussions. Give direct, ready-to-speak responses that are clear and professional.`,
formatRequirements: `**RESPONSE FORMAT REQUIREMENTS:** searchUsage: `**SEARCH TOOL USAGE:**
- Keep responses SHORT and CONCISE (1-3 sentences max)
- Use **markdown formatting** for better readability
- Use **bold** for key points and emphasis
- Use bullet points (-) for lists when appropriate
- Focus on the most essential information only`,
searchUsage: `**SEARCH TOOL USAGE:**
- If participants mention **recent industry news, regulatory changes, or market updates**, **ALWAYS use Google search** for current information - If participants mention **recent industry news, regulatory changes, or market updates**, **ALWAYS use Google search** for current information
- If they reference **competitor activities, recent reports, or current statistics**, search for the latest data first - If they reference **competitor activities, recent reports, or current statistics**, search for the latest data first
- If they discuss **new technologies, tools, or industry developments**, use search to provide accurate insights - If they discuss **new technologies, tools, or industry developments**, use search to provide accurate insights
- After searching, provide a **concise, informed response** that adds value to the discussion`, - After searching, provide a **concise, informed response** that adds value to the discussion`,
content: `Examples: content: `Examples:
Participant: "What's the status on the project?" Participant: "What's the status on the project?"
You: "We're currently on track to meet our deadline. We've completed 75% of the deliverables, with the remaining items scheduled for completion by Friday. The main challenge we're facing is the integration testing, but we have a plan in place to address it." You: "We're currently on track to meet our deadline. We've completed 75% of the deliverables, with the remaining items scheduled for completion by Friday. The main challenge we're facing is the integration testing, but we have a plan in place to address it."
@ -94,27 +104,20 @@ You: "Absolutely. We're currently at 80% of our allocated budget with 20% of the
Participant: "What are the next steps?" Participant: "What are the next steps?"
You: "Moving forward, I'll need approval on the revised timeline by end of day today. Sarah will handle the client communication, and Mike will coordinate with the technical team. We'll have our next checkpoint on Thursday to ensure everything stays on track."`, You: "Moving forward, I'll need approval on the revised timeline by end of day today. Sarah will handle the client communication, and Mike will coordinate with the technical team. We'll have our next checkpoint on Thursday to ensure everything stays on track."`,
outputInstructions: `**OUTPUT INSTRUCTIONS:** outputInstructions: `**OUTPUT INSTRUCTIONS:**
Provide only the exact words to say in **markdown format**. Be clear, concise, and action-oriented in your responses. Keep it **short and impactful**.`, Provide only the exact words to say in **markdown format**. Be clear, concise, and action-oriented in your responses. Keep it **short and impactful**.`,
}, },
presentation: { presentation: {
intro: `You are a presentation coach. Your job is to provide the exact words the presenter should say during presentations, pitches, and public speaking events. Give direct, ready-to-speak responses that are engaging and confident.`, intro: `You are a presentation coach. Your job is to provide the exact words the presenter should say during presentations, pitches, and public speaking events. Give direct, ready-to-speak responses that are engaging and confident.`,
formatRequirements: `**RESPONSE FORMAT REQUIREMENTS:** searchUsage: `**SEARCH TOOL USAGE:**
- Keep responses SHORT and CONCISE (1-3 sentences max)
- Use **markdown formatting** for better readability
- Use **bold** for key points and emphasis
- Use bullet points (-) for lists when appropriate
- Focus on the most essential information only`,
searchUsage: `**SEARCH TOOL USAGE:**
- If the audience asks about **recent market trends, current statistics, or latest industry data**, **ALWAYS use Google search** for up-to-date information - If the audience asks about **recent market trends, current statistics, or latest industry data**, **ALWAYS use Google search** for up-to-date information
- If they reference **recent events, new competitors, or current market conditions**, search for the latest information first - If they reference **recent events, new competitors, or current market conditions**, search for the latest information first
- If they inquire about **recent studies, reports, or breaking news** in your field, use search to provide accurate data - If they inquire about **recent studies, reports, or breaking news** in your field, use search to provide accurate data
- After searching, provide a **concise, credible response** with current facts and figures`, - After searching, provide a **concise, credible response** with current facts and figures`,
content: `Examples: content: `Examples:
Audience: "Can you explain that slide again?" Audience: "Can you explain that slide again?"
You: "Of course. This slide shows our three-year growth trajectory. The blue line represents revenue, which has grown 150% year over year. The orange bars show our customer acquisition, doubling each year. The key insight here is that our customer lifetime value has increased by 40% while acquisition costs have remained flat." You: "Of course. This slide shows our three-year growth trajectory. The blue line represents revenue, which has grown 150% year over year. The orange bars show our customer acquisition, doubling each year. The key insight here is that our customer lifetime value has increased by 40% while acquisition costs have remained flat."
@ -125,27 +128,20 @@ You: "Great question. Our competitive advantage comes down to three core strengt
Audience: "How do you plan to scale?" Audience: "How do you plan to scale?"
You: "Our scaling strategy focuses on three pillars. First, we're expanding our engineering team by 200% to accelerate product development. Second, we're entering three new markets next quarter. Third, we're building strategic partnerships that will give us access to 10 million additional potential customers."`, You: "Our scaling strategy focuses on three pillars. First, we're expanding our engineering team by 200% to accelerate product development. Second, we're entering three new markets next quarter. Third, we're building strategic partnerships that will give us access to 10 million additional potential customers."`,
outputInstructions: `**OUTPUT INSTRUCTIONS:** outputInstructions: `**OUTPUT INSTRUCTIONS:**
Provide only the exact words to say in **markdown format**. Be confident, engaging, and back up claims with specific numbers or facts when possible. Keep responses **short and impactful**.`, Provide only the exact words to say in **markdown format**. Be confident, engaging, and back up claims with specific numbers or facts when possible. Keep responses **short and impactful**.`,
}, },
negotiation: { negotiation: {
intro: `You are a negotiation assistant. Your job is to provide the exact words to say during business negotiations, contract discussions, and deal-making conversations. Give direct, ready-to-speak responses that are strategic and professional.`, intro: `You are a negotiation assistant. Your job is to provide the exact words to say during business negotiations, contract discussions, and deal-making conversations. Give direct, ready-to-speak responses that are strategic and professional.`,
formatRequirements: `**RESPONSE FORMAT REQUIREMENTS:** searchUsage: `**SEARCH TOOL USAGE:**
- Keep responses SHORT and CONCISE (1-3 sentences max)
- Use **markdown formatting** for better readability
- Use **bold** for key points and emphasis
- Use bullet points (-) for lists when appropriate
- Focus on the most essential information only`,
searchUsage: `**SEARCH TOOL USAGE:**
- If they mention **recent market pricing, current industry standards, or competitor offers**, **ALWAYS use Google search** for current benchmarks - If they mention **recent market pricing, current industry standards, or competitor offers**, **ALWAYS use Google search** for current benchmarks
- If they reference **recent legal changes, new regulations, or market conditions**, search for the latest information first - If they reference **recent legal changes, new regulations, or market conditions**, search for the latest information first
- If they discuss **recent company news, financial performance, or industry developments**, use search to provide informed responses - If they discuss **recent company news, financial performance, or industry developments**, use search to provide informed responses
- After searching, provide a **strategic, well-informed response** that leverages current market intelligence`, - After searching, provide a **strategic, well-informed response** that leverages current market intelligence`,
content: `Examples: content: `Examples:
Other party: "That price is too high" Other party: "That price is too high"
You: "I understand your concern about the investment. Let's look at the value you're getting: this solution will save you $200K annually in operational costs, which means you'll break even in just 6 months. Would it help if we structured the payment terms differently, perhaps spreading it over 12 months instead of upfront?" You: "I understand your concern about the investment. Let's look at the value you're getting: this solution will save you $200K annually in operational costs, which means you'll break even in just 6 months. Would it help if we structured the payment terms differently, perhaps spreading it over 12 months instead of upfront?"
@ -156,27 +152,20 @@ You: "I appreciate your directness. We want this to work for both parties. Our c
Other party: "We're considering other options" Other party: "We're considering other options"
You: "That's smart business practice. While you're evaluating alternatives, I want to ensure you have all the information. Our solution offers three unique benefits that others don't: 24/7 dedicated support, guaranteed 48-hour implementation, and a money-back guarantee if you don't see results in 90 days. How important are these factors in your decision?"`, You: "That's smart business practice. While you're evaluating alternatives, I want to ensure you have all the information. Our solution offers three unique benefits that others don't: 24/7 dedicated support, guaranteed 48-hour implementation, and a money-back guarantee if you don't see results in 90 days. How important are these factors in your decision?"`,
outputInstructions: `**OUTPUT INSTRUCTIONS:** outputInstructions: `**OUTPUT INSTRUCTIONS:**
Provide only the exact words to say in **markdown format**. Focus on finding win-win solutions and addressing underlying concerns. Keep responses **short and impactful**.`, Provide only the exact words to say in **markdown format**. Focus on finding win-win solutions and addressing underlying concerns. Keep responses **short and impactful**.`,
}, },
exam: { exam: {
intro: `You are an exam assistant designed to help students pass tests efficiently. Your role is to provide direct, accurate answers to exam questions with minimal explanation - just enough to confirm the answer is correct.`, intro: `You are an exam assistant designed to help students pass tests efficiently. Your role is to provide direct, accurate answers to exam questions with minimal explanation - just enough to confirm the answer is correct.`,
formatRequirements: `**RESPONSE FORMAT REQUIREMENTS:** searchUsage: `**SEARCH TOOL USAGE:**
- Keep responses SHORT and CONCISE (1-2 sentences max)
- Use **markdown formatting** for better readability
- Use **bold** for the answer choice/result
- Focus on the most essential information only
- Provide only brief justification for correctness`,
searchUsage: `**SEARCH TOOL USAGE:**
- If the question involves **recent information, current events, or updated facts**, **ALWAYS use Google search** for the latest data - If the question involves **recent information, current events, or updated facts**, **ALWAYS use Google search** for the latest data
- If they reference **specific dates, statistics, or factual information** that might be outdated, search for current information - If they reference **specific dates, statistics, or factual information** that might be outdated, search for current information
- If they ask about **recent research, new theories, or updated methodologies**, search for the latest information - If they ask about **recent research, new theories, or updated methodologies**, search for the latest information
- After searching, provide **direct, accurate answers** with minimal explanation`, - After searching, provide **direct, accurate answers** with minimal explanation`,
content: `Focus on providing efficient exam assistance that helps students pass tests quickly. content: `Focus on providing efficient exam assistance that helps students pass tests quickly.
**Key Principles:** **Key Principles:**
1. **Answer the question directly** - no unnecessary explanations 1. **Answer the question directly** - no unnecessary explanations
@ -196,83 +185,62 @@ You: "**Question**: Which of the following is a primary color? A) Green B) Red C
Question: "Solve for x: 2x + 5 = 13" Question: "Solve for x: 2x + 5 = 13"
You: "**Question**: Solve for x: 2x + 5 = 13 **Answer**: x = 4 **Why**: Subtract 5 from both sides: 2x = 8, then divide by 2: x = 4."`, You: "**Question**: Solve for x: 2x + 5 = 13 **Answer**: x = 4 **Why**: Subtract 5 from both sides: 2x = 8, then divide by 2: x = 4."`,
outputInstructions: `**OUTPUT INSTRUCTIONS:** outputInstructions: `**OUTPUT INSTRUCTIONS:**
Provide direct exam answers in **markdown format**. Include the question text, the correct answer choice, and a brief justification. Focus on efficiency and accuracy. Keep responses **short and to the point**.`, Provide direct exam answers in **markdown format**. Include the question text, the correct answer choice, and a brief justification. Focus on efficiency and accuracy. Keep responses **short and to the point**.`,
}, },
}; };
function buildSystemPrompt(promptParts, customPrompt = '', googleSearchEnabled = true) { function buildSystemPrompt(
const sections = [promptParts.intro, '\n\n', promptParts.formatRequirements]; promptParts,
customPrompt = "",
googleSearchEnabled = true,
responseMode = "brief",
) {
const formatReqs =
responseModeFormats[responseMode] || responseModeFormats.brief;
const sections = [
promptParts.intro,
"\n\n",
formatReqs,
"\n\n",
codingAwareness,
];
// Only add search usage section if Google Search is enabled // Only add search usage section if Google Search is enabled
if (googleSearchEnabled) { if (googleSearchEnabled) {
sections.push('\n\n', promptParts.searchUsage); sections.push("\n\n", promptParts.searchUsage);
} }
sections.push('\n\n', promptParts.content, '\n\nUser-provided context\n-----\n', customPrompt, '\n-----\n\n', promptParts.outputInstructions); sections.push(
"\n\n",
promptParts.content,
"\n\nUser-provided context\n-----\n",
customPrompt,
"\n-----\n\n",
promptParts.outputInstructions,
);
return sections.join(''); return sections.join("");
} }
function getSystemPrompt(profile, customPrompt = '', googleSearchEnabled = true) { function getSystemPrompt(
const promptParts = profilePrompts[profile] || profilePrompts.interview; profile,
return buildSystemPrompt(promptParts, customPrompt, googleSearchEnabled); customPrompt = "",
googleSearchEnabled = true,
responseMode = "brief",
) {
const promptParts = profilePrompts[profile] || profilePrompts.interview;
return buildSystemPrompt(
promptParts,
customPrompt,
googleSearchEnabled,
responseMode,
);
} }
// Comprehensive system prompt for Vision/Image analysis.
// Sent alongside a screenshot to the vision model; instructs it to classify
// the on-screen context (code, math, MCQ, document, form, error) and answer
// directly in markdown without asking for clarification.
const VISION_ANALYSIS_PROMPT = `You are an expert AI assistant analyzing a screenshot. Your task is to understand what the user needs help with and provide the most useful response.
**ANALYSIS APPROACH:**
1. First, identify what's shown on the screen (code editor, math problem, website, document, exam, etc.)
2. Determine what the user likely needs (explanation, solution, answer, debugging help, etc.)
3. Provide a direct, actionable response
**RESPONSE GUIDELINES BY CONTEXT:**
**If it's CODE (LeetCode, HackerRank, coding interview, IDE):**
- Identify the programming language and problem type
- Provide a brief explanation of the approach (2-3 bullet points max)
- Give the complete, working code solution
- Include time/space complexity if relevant
- If there's an error, explain the fix
**If it's MATH or SCIENCE:**
- Show step-by-step solution
- Use proper mathematical notation with LaTeX ($..$ for inline, $$...$$ for blocks)
- Provide the final answer clearly marked
- Include any relevant formulas used
**If it's MCQ/EXAM/QUIZ:**
- State the correct answer immediately and clearly (e.g., "**Answer: B**")
- Provide brief justification (1-2 sentences)
- If multiple questions visible, answer all of them
**If it's a DOCUMENT/ARTICLE/WEBSITE:**
- Summarize the key information
- Answer any specific questions if apparent
- Highlight important points
**If it's a FORM/APPLICATION:**
- Help fill in the required information
- Suggest appropriate responses
- Point out any issues or missing fields
**If it's an ERROR/DEBUG scenario:**
- Identify the error type and cause
- Provide the fix immediately
- Explain briefly why it occurred
**FORMAT REQUIREMENTS:**
- Use **markdown** for formatting
- Use **bold** for key answers and important points
- Use code blocks with language specification for code
- Be concise but complete - no unnecessary explanations
- No pleasantries or filler text - get straight to the answer
**CRITICAL:** Provide the complete answer. Don't ask for clarification - make reasonable assumptions and deliver value immediately.`;
module.exports = { module.exports = {
profilePrompts, profilePrompts,
getSystemPrompt, responseModeFormats,
VISION_ANALYSIS_PROMPT, codingAwareness,
getSystemPrompt,
}; };

File diff suppressed because it is too large Load Diff

332
src/utils/whisperWorker.js Normal file
View File

@ -0,0 +1,332 @@
/**
 * Whisper Worker — runs ONNX Runtime in an isolated child process.
 *
 * The main Electron process forks this file and communicates via IPC messages.
 * If ONNX Runtime crashes (SIGSEGV/SIGABRT inside the native Metal or CPU
 * execution provider), only this worker dies — the main process survives and
 * can respawn the worker automatically.
 *
 * Protocol:
 * parent -> worker:
 * { type: 'load', modelName, cacheDir, device? }
 * { type: 'transcribe', audioBase64, language? } // PCM 16-bit 16kHz as base64
 * { type: 'shutdown' }
 *
 * worker -> parent:
 * { type: 'load-result', success, error?, device? }
 * { type: 'transcribe-result', success, text?, error? }
 * { type: 'progress', file, progress, loaded, total, status }
 * { type: 'status', message }
 * { type: 'ready' }
 */
// ── Crash handlers — report fatal errors before the process dies ──
process.on("uncaughtException", (err) => {
try {
send({
type: "status",
message: `[Worker] Uncaught exception: ${err.message || err}`,
});
console.error("[WhisperWorker] Uncaught exception:", err);
} catch (_) {
// Cannot communicate with parent anymore
}
process.exit(1);
});
process.on("unhandledRejection", (reason) => {
try {
send({
type: "status",
message: `[Worker] Unhandled rejection: ${reason?.message || reason}`,
});
console.error("[WhisperWorker] Unhandled rejection:", reason);
} catch (_) {
// Cannot communicate with parent anymore
}
// Don't exit — let it be caught by the pipeline's own handlers
});
let whisperPipeline = null;
/** Which ONNX backend is actually active: "cpu" | "wasm" */
let activeDevice = null;
/**
 * Convert raw little-endian signed 16-bit PCM samples into normalized
 * Float32 values in [-1, 1), the format the Whisper pipeline expects.
 *
 * @param {Buffer|null} pcm16Buffer - Interleaved s16le samples; may be null/empty.
 * @returns {Float32Array} One float per complete 16-bit sample.
 */
function pcm16ToFloat32(pcm16Buffer) {
  if (!pcm16Buffer || pcm16Buffer.length === 0) {
    return new Float32Array(0);
  }
  // A trailing odd byte cannot form a sample — ignore it.
  const sampleCount = Math.floor(pcm16Buffer.length / 2);
  const out = new Float32Array(sampleCount);
  for (let sample = 0, offset = 0; sample < sampleCount; sample++, offset += 2) {
    // Divide by 32768 so -32768 maps to exactly -1.0.
    out[sample] = pcm16Buffer.readInt16LE(offset) / 32768;
  }
  return out;
}
/**
 * Load the Whisper model.
 *
 * @param {string} modelName HuggingFace model id, e.g. "Xenova/whisper-small"
 * @param {string} cacheDir Directory for cached model files
 * @param {string} [device] "cpu" (onnxruntime-node) or "wasm" (onnxruntime-web).
 * When "cpu" is requested we try native first and fall
 * back to "wasm" on failure (ABI mismatch, etc.).
 */
async function loadModel(modelName, cacheDir, device = "cpu") {
  // Idempotent: a repeated "load" just reports the backend chosen earlier.
  if (whisperPipeline) {
    send({ type: "load-result", success: true, device: activeDevice });
    return;
  }
  try {
    send({
      type: "status",
      message: "Loading Whisper model (first time may take a while)...",
    });
    // Validate / create cache directory
    const fs = require("fs");
    const path = require("path");
    if (cacheDir) {
      try {
        if (!fs.existsSync(cacheDir)) {
          fs.mkdirSync(cacheDir, { recursive: true });
          console.log("[WhisperWorker] Created cache directory:", cacheDir);
        }
      } catch (mkdirErr) {
        // Non-fatal: the library may still download into a default location.
        console.warn(
          "[WhisperWorker] Cannot create cache dir:",
          mkdirErr.message,
        );
      }
      // Check for corrupted partial downloads — if an onnx file exists but
      // is suspiciously small (< 1 KB), delete it so the library re-downloads.
      try {
        const modelDir = path.join(cacheDir, modelName.replace("/", path.sep));
        if (fs.existsSync(modelDir)) {
          // Recursively scan the model's cache tree for tiny .onnx files.
          const walk = (dir) => {
            for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
              const full = path.join(dir, entry.name);
              if (entry.isDirectory()) {
                walk(full);
              } else if (
                entry.name.endsWith(".onnx") &&
                fs.statSync(full).size < 1024
              ) {
                console.warn(
                  "[WhisperWorker] Removing likely-corrupt file:",
                  full,
                );
                fs.unlinkSync(full);
              }
            }
          };
          walk(modelDir);
        }
      } catch (cleanErr) {
        console.warn("[WhisperWorker] Cache cleanup error:", cleanErr.message);
      }
    }
    // Dynamic import: the transformers package is loaded lazily here (and this
    // file otherwise uses require(), so a static import is not an option).
    const { pipeline, env } = await import("@huggingface/transformers");
    env.cacheDir = cacheDir;
    // Attempt to load with the requested device
    const devicesToTry = device === "wasm" ? ["wasm"] : ["cpu", "wasm"];
    let lastError = null;
    for (const dev of devicesToTry) {
      try {
        send({
          type: "status",
          message: `Loading Whisper (${dev} backend)...`,
        });
        console.log(
          `[WhisperWorker] Trying device: ${dev}, model: ${modelName}`,
        );
        whisperPipeline = await pipeline(
          "automatic-speech-recognition",
          modelName,
          {
            // "q8" selects 8-bit quantized weights — smaller download and
            // CPU-friendly inference (per transformers.js dtype options).
            dtype: "q8",
            device: dev,
            // Relay the library's download/initialization progress to the
            // parent so it can drive a download-status UI.
            progress_callback: (progress) => {
              // progress: { status, name?, file?, progress?, loaded?, total? }
              if (
                progress.status === "download" ||
                progress.status === "progress"
              ) {
                send({
                  type: "progress",
                  file: progress.file || progress.name || "",
                  progress: progress.progress ?? 0,
                  loaded: progress.loaded ?? 0,
                  total: progress.total ?? 0,
                  status: progress.status,
                });
              } else if (progress.status === "done") {
                send({
                  type: "progress",
                  file: progress.file || progress.name || "",
                  progress: 100,
                  loaded: progress.total ?? 0,
                  total: progress.total ?? 0,
                  status: "done",
                });
              } else if (progress.status === "initiate") {
                send({
                  type: "progress",
                  file: progress.file || progress.name || "",
                  progress: 0,
                  loaded: 0,
                  total: 0,
                  status: "initiate",
                });
              }
            },
          },
        );
        activeDevice = dev;
        console.log(
          `[WhisperWorker] Model loaded successfully (device: ${dev})`,
        );
        send({ type: "load-result", success: true, device: dev });
        return;
      } catch (err) {
        lastError = err;
        console.error(
          `[WhisperWorker] Failed to load with device "${dev}":`,
          err.message || err,
        );
        if (dev === "cpu" && devicesToTry.includes("wasm")) {
          send({
            type: "status",
            message: `Native CPU backend failed (${err.message}). Trying WASM fallback...`,
          });
        }
        // Reset pipeline state before retry
        whisperPipeline = null;
      }
    }
    // All devices failed
    throw lastError || new Error("All ONNX backends failed");
  } catch (error) {
    // Single exit point for failure: parent decides whether to retry/respawn.
    send({ type: "load-result", success: false, error: error.message });
  }
}
/**
 * Transcribe one chunk of base64-encoded PCM (16-bit, 16 kHz, mono) audio and
 * report the outcome to the parent as a "transcribe-result" message.
 *
 * @param {string} audioBase64 Raw s16le samples, base64-encoded
 * @param {string} [language] Language hint; "auto" or falsy means auto-detect
 */
async function transcribe(audioBase64, language) {
  // Every failure path reports the same message shape to the parent.
  const fail = (error) =>
    send({ type: "transcribe-result", success: false, error });
  if (!whisperPipeline) {
    fail("Whisper pipeline not loaded");
    return;
  }
  try {
    const rawPcm = Buffer.from(audioBase64, "base64");
    if (rawPcm.length < 2) {
      fail("Audio buffer too small");
      return;
    }
    // Cap at ~30 seconds (16kHz, 16-bit mono)
    const maxBytes = 16000 * 2 * 30;
    const bounded = rawPcm.length > maxBytes ? rawPcm.slice(0, maxBytes) : rawPcm;
    const samples = pcm16ToFloat32(bounded);
    if (samples.length === 0) {
      fail("Empty audio after conversion");
      return;
    }
    // Build pipeline options with the requested language
    const options = {
      sampling_rate: 16000,
      task: "transcribe",
    };
    if (language && language !== "auto") {
      options.language = language;
    }
    const result = await whisperPipeline(samples, options);
    send({
      type: "transcribe-result",
      success: true,
      text: result.text?.trim() || "",
    });
  } catch (error) {
    fail(error.message || String(error));
  }
}
/**
 * Best-effort IPC message to the parent process.
 * No-ops when the worker was not forked with an IPC channel, and swallows
 * errors from a channel that has already been closed.
 *
 * @param {object} msg - JSON-serializable message payload.
 */
function send(msg) {
  try {
    // process.send only exists when this process was forked with IPC.
    process.send?.(msg);
  } catch (_) {
    // Parent may have disconnected
  }
}
// ── IPC command dispatcher: handles 'load' / 'transcribe' / 'shutdown' from
// the parent. Unknown message types are silently ignored.
process.on("message", (msg) => {
  switch (msg.type) {
    case "load":
      // loadModel reports its own result; this .catch covers rejections that
      // escape it so the parent always receives a load-result.
      loadModel(msg.modelName, msg.cacheDir, msg.device).catch((err) => {
        send({ type: "load-result", success: false, error: err.message });
      });
      break;
    case "transcribe":
      // Same pattern: guarantee a transcribe-result even on unexpected errors.
      transcribe(msg.audioBase64, msg.language).catch((err) => {
        send({ type: "transcribe-result", success: false, error: err.message });
      });
      break;
    case "shutdown":
      // Dispose the ONNX session gracefully before exiting to avoid
      // native cleanup race conditions (SIGABRT on mutex destroy).
      (async () => {
        if (whisperPipeline) {
          try {
            // dispose() may not exist on every pipeline implementation.
            if (typeof whisperPipeline.dispose === "function") {
              await whisperPipeline.dispose();
            }
          } catch (_) {
            // Best-effort cleanup
          }
          whisperPipeline = null;
        }
        // Small delay to let native threads wind down
        setTimeout(() => process.exit(0), 200);
      })();
      break;
  }
});
// Signal readiness to parent
send({ type: "ready" });

File diff suppressed because it is too large Load Diff

View File

@ -12,4 +12,4 @@ export async function resizeLayout() {
} catch (error) { } catch (error) {
console.error('Error resizing window:', error); console.error('Error resizing window:', error);
} }
} }