Compare commits

...

41 Commits

Author SHA1 Message Date
Илья Глазунов
219f35cc04 chore: bump version to 0.7.9 and remove unused additional DMG options
Some checks failed
Build and Release / build (x64, ubuntu-latest, linux) (push) Has been skipped
Build and Release / build (arm64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, windows-latest, win32) (push) Has been cancelled
Build and Release / release (push) Has been cancelled
2026-02-21 23:19:57 +03:00
Илья Глазунов
b6560f3c6c feat: update DMG configuration to use UDZO format and add additional options 2026-02-20 23:53:12 +03:00
Илья Глазунов
b1d9130b50 chore: bump version to 0.7.8 and remove unused DMG options in forge.config.js
Some checks failed
Build and Release / build (x64, ubuntu-latest, linux) (push) Has been skipped
Build and Release / build (arm64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, windows-latest, win32) (push) Has been cancelled
Build and Release / release (push) Has been cancelled
2026-02-20 23:33:29 +03:00
Илья Глазунов
07c39455be fix: update release workflow to use macOS runner; bump version to 0.7.7
Some checks failed
Build and Release / build (x64, ubuntu-latest, linux) (push) Has been skipped
Build and Release / build (arm64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, windows-latest, win32) (push) Has been cancelled
Build and Release / release (push) Has been cancelled
2026-02-20 23:20:23 +03:00
Илья Глазунов
39fe8d948f feat: update DMG format to UDZO and add additional options; bump version to 0.7.6
Some checks failed
Build and Release / build (x64, ubuntu-latest, linux) (push) Has been skipped
Build and Release / build (arm64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, macos-latest, darwin) (push) Has been cancelled
Build and Release / release (push) Has been cancelled
Build and Release / build (x64, windows-latest, win32) (push) Has been cancelled
2026-02-20 23:07:37 +03:00
Илья Глазунов
1330af8d19 feat: update DMG maker configuration and bump version to 0.7.5
Some checks failed
Build and Release / build (x64, ubuntu-latest, linux) (push) Has been skipped
Build and Release / build (arm64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, windows-latest, win32) (push) Has been cancelled
Build and Release / release (push) Has been cancelled
2026-02-20 22:54:56 +03:00
Илья Глазунов
c161f251ed Update application logos: replace logo.icns, logo.ico, and logo.png with new versions
Some checks failed
Build and Release / build (x64, ubuntu-latest, linux) (push) Has been skipped
Build and Release / build (arm64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, windows-latest, win32) (push) Has been cancelled
Build and Release / release (push) Has been cancelled
2026-02-17 15:15:24 +03:00
Илья Глазунов
d111b88886 fix: update badge colors for latest and stable version in README.md 2026-02-16 23:44:18 +03:00
Илья Глазунов
043b5d159e feat: update README.md with enhanced description and features; add logo image 2026-02-16 23:36:44 +03:00
Илья Глазунов
851edc6da1 feat: add .npmrc file with node-linker set to hoisted
Some checks failed
Build and Release / build (arm64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, windows-latest, win32) (push) Has been cancelled
Build and Release / release (push) Has been cancelled
Build and Release / build (x64, ubuntu-latest, linux) (push) Has been skipped
2026-02-16 22:49:23 +03:00
Илья Глазунов
c68a546e72 feat: add '@electron/rebuild' dependency version 3.7.2 to pnpm-lock.yaml
Some checks failed
Build and Release / build (x64, ubuntu-latest, linux) (push) Has been skipped
Build and Release / build (arm64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, windows-latest, win32) (push) Has been cancelled
Build and Release / release (push) Has been cancelled
2026-02-16 22:46:41 +03:00
Илья Глазунов
09b2530714 chore: bump version to 0.7.1 in package.json
Some checks failed
Build and Release / build (x64, ubuntu-latest, linux) (push) Has been skipped
Build and Release / build (arm64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, windows-latest, win32) (push) Has been cancelled
Build and Release / release (push) Has been cancelled
2026-02-16 22:44:22 +03:00
Илья Глазунов
a9dce5bf3c fix: ensure current directory is included in pnpm workspace packages 2026-02-16 22:43:49 +03:00
Илья Глазунов
31d50c9713 Merge branch 'v0.7.0-update'
Some checks failed
Build and Release / build (x64, ubuntu-latest, linux) (push) Has been skipped
Build and Release / build (arm64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, windows-latest, win32) (push) Has been cancelled
Build and Release / release (push) Has been cancelled
2026-02-16 22:40:20 +03:00
bbad79875c Merge pull request 'enhancment/code-highlighting-in-llm-chat' (#6) from enhancment/code-highlighting-in-llm-chat into v0.7.0-update
Reviewed-on: #6
2026-02-16 19:32:13 +00:00
Илья Глазунов
7f15b65eb1 feat: add light theme support and update theme detection in renderer 2026-02-16 22:29:30 +03:00
Илья Глазунов
d6dbaa3141 feat: add syntax highlighting for code blocks in AssistantView 2026-02-16 22:29:24 +03:00
2ebde60dcd Merge pull request 'Fixing local transcription flow' (#5) from fix/local-transcription-flow into v0.7.0-update
Reviewed-on: #5
2026-02-16 16:56:55 +00:00
Илья Глазунов
0d56e06724 feat: add whisper progress tracking and UI updates for download status 2026-02-16 19:55:39 +03:00
Илья Глазунов
526bc4e877 feat: enhance Whisper worker integration with system Node.js detection 2026-02-16 17:10:57 +03:00
Илья Глазунов
684b61755c feat: implement Whisper worker for isolated audio transcription 2026-02-16 11:38:26 +03:00
Илья Глазунов
1b74968006 Add multilingual support in CustomizeView and update speech configuration handling in gemini 2026-02-15 04:00:09 +03:00
Илья Глазунов
4cf48ee0af Refactor window management and global shortcuts handling 2026-02-15 00:34:37 +03:00
Илья Глазунов
494e692738 Add OpenAI dependency and implement model loading in MainView for OpenAI-compatible API 2026-02-14 23:16:41 +03:00
Илья Глазунов
8b216bbb33 Rename project from "Cheating Daddy" to "Mastermind" across all configurations and components to reflect the new branding. 2026-02-14 20:31:35 +03:00
Илья Глазунов
bd62cf5524 Add OpenAI-compatible API support with configuration management and response handling 2026-02-14 20:18:02 +03:00
Илья Глазунов
bfd76dc0c1 Add logging for transcription handling and disable proactive audio 2026-02-14 04:28:29 +03:00
Илья Глазунов
310b6b3fbd huge refactor 2026-02-14 04:17:46 +03:00
Илья Глазунов
430895d9ab small fixes 2026-02-13 22:11:01 +03:00
Илья Глазунов
3a8d9705a2 Remove deprecated dependencies from pnpm-lock.yaml to streamline package management and improve project maintainability.
Some checks failed
Build and Release / build (x64, ubuntu-latest, linux) (push) Has been skipped
Build and Release / build (arm64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, windows-latest, win32) (push) Has been cancelled
Build and Release / release (push) Has been cancelled
2026-01-16 01:20:03 +03:00
Илья Глазунов
06e178762d Rename project from "Cheating Daddy" to "Mastermind" across all files, update version to 0.6.0, and implement migration functionality for users with existing configurations. Enhance onboarding experience with migration options and update relevant documentation.
Some checks failed
Build and Release / build (x64, ubuntu-latest, linux) (push) Has been skipped
Build and Release / build (arm64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, windows-latest, win32) (push) Has been cancelled
Build and Release / release (push) Has been cancelled
2026-01-16 01:16:43 +03:00
Илья Глазунов
656e8f0932 Implement Push-to-Talk feature and enhance audio input settings in AssistantView and CustomizeView. Update README for API key instructions and improve audio processing logic in OpenAI SDK. Adjust pnpm-lock.yaml for dependency updates. 2026-01-16 00:41:58 +03:00
Илья Глазунов
669c019fd8 prettier fix 2026-01-15 23:26:09 +03:00
Илья Глазунов
528dfe01a1 Bump version to 0.5.11 and enhance audio processing with timer-based transcription for Windows
Some checks failed
Build and Release / build (x64, ubuntu-latest, linux) (push) Has been skipped
Build and Release / build (arm64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, windows-latest, win32) (push) Has been cancelled
Build and Release / release (push) Has been cancelled
2026-01-15 21:30:21 +03:00
Илья Глазунов
fb6c8e3fc0 Add custom screen picker dialog for Windows audio capture and update version to 0.5.10
Some checks failed
Build and Release / build (x64, ubuntu-latest, linux) (push) Has been skipped
Build and Release / build (arm64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, windows-latest, win32) (push) Has been cancelled
Build and Release / release (push) Has been cancelled
2026-01-15 21:14:50 +03:00
Илья Глазунов
76d6fc2749 Refactor Windows audio capture setup to streamline processing and remove redundant logging
Some checks failed
Build and Release / build (x64, ubuntu-latest, linux) (push) Has been skipped
Build and Release / build (arm64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, windows-latest, win32) (push) Has been cancelled
Build and Release / release (push) Has been cancelled
2026-01-15 20:45:34 +03:00
Илья Глазунов
2f013b4751 Bump version to 0.5.8 and add silence detection timer for audio processing
Some checks failed
Build and Release / build (x64, ubuntu-latest, linux) (push) Has been skipped
Build and Release / build (arm64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, windows-latest, win32) (push) Has been cancelled
Build and Release / release (push) Has been cancelled
2026-01-15 20:21:06 +03:00
Илья Глазунов
66dce4415a Update author information in package.json 2026-01-15 19:48:41 +03:00
Илья Глазунов
6460349fc7 Bump version to 0.5.7 and update screen capture handler to use system picker
Some checks failed
Build and Release / build (x64, ubuntu-latest, linux) (push) Has been skipped
Build and Release / build (arm64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, windows-latest, win32) (push) Has been cancelled
Build and Release / release (push) Has been cancelled
2026-01-15 19:47:44 +03:00
Илья Глазунов
6926c27f20 похуй
Some checks failed
Build and Release / build (x64, ubuntu-latest, linux) (push) Has been skipped
Build and Release / build (arm64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, windows-latest, win32) (push) Has been cancelled
Build and Release / release (push) Has been cancelled
2026-01-15 19:37:13 +03:00
Илья Глазунов
cbf82f9317 Refactor screen capture handler to use system picker across all platforms
Some checks failed
Build and Release / build (x64, ubuntu-latest, linux) (push) Has been skipped
Build and Release / build (arm64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, macos-latest, darwin) (push) Has been cancelled
Build and Release / build (x64, windows-latest, win32) (push) Has been cancelled
Build and Release / release (push) Has been cancelled
2026-01-15 19:28:45 +03:00
38 changed files with 13656 additions and 9905 deletions

View File

@ -1,100 +1,101 @@
name: Build and Release name: Build and Release
on: on:
push: push:
tags: tags:
- 'v*.*.*' - 'v*.*.*'
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
jobs: jobs:
build: build:
if: github.server_url == 'https://github.com' if: github.server_url == 'https://github.com'
strategy:
matrix:
include:
- os: macos-latest
platform: darwin
arch: x64
- os: macos-latest
platform: darwin
arch: arm64
- os: ubuntu-latest
platform: linux
arch: x64
- os: windows-latest
platform: win32
arch: x64
runs-on: ${{ matrix.os }} strategy:
matrix:
include:
- os: macos-latest
platform: darwin
arch: x64
- os: macos-latest
platform: darwin
arch: arm64
- os: ubuntu-latest
platform: linux
arch: x64
- os: windows-latest
platform: win32
arch: x64
steps: runs-on: ${{ matrix.os }}
- name: Checkout code
uses: actions/checkout@v4
- name: Setup Node.js steps:
uses: actions/setup-node@v4 - name: Checkout code
with: uses: actions/checkout@v4
node-version: 20
- name: Setup pnpm - name: Setup Node.js
uses: pnpm/action-setup@v4 uses: actions/setup-node@v4
with: with:
version: 9 node-version: 20
- name: Install dependencies - name: Setup pnpm
run: pnpm install uses: pnpm/action-setup@v4
with:
version: 9
- name: Build for ${{ matrix.platform }}-${{ matrix.arch }} - name: Install dependencies
run: pnpm run make -- --arch=${{ matrix.arch }} run: pnpm install
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Upload artifacts - name: Build for ${{ matrix.platform }}-${{ matrix.arch }}
uses: actions/upload-artifact@v4 run: pnpm run make -- --arch=${{ matrix.arch }}
with: env:
name: release-${{ matrix.platform }}-${{ matrix.arch }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
path: |
out/make/**/*.dmg
out/make/**/*.zip
out/make/**/*.exe
out/make/**/*.AppImage
out/make/**/*.deb
out/make/**/*.rpm
if-no-files-found: ignore
release: - name: Upload artifacts
needs: build uses: actions/upload-artifact@v4
runs-on: ubuntu-latest with:
if: github.server_url == 'https://github.com' name: release-${{ matrix.platform }}-${{ matrix.arch }}
path: |
permissions: out/make/**/*.dmg
contents: write out/make/**/*.zip
out/make/**/*.exe
out/make/**/*.AppImage
out/make/**/*.deb
out/make/**/*.rpm
if-no-files-found: ignore
compression-level: 0
steps: release:
- name: Download all artifacts needs: build
uses: actions/download-artifact@v4 runs-on: macos-latest
with: if: github.server_url == 'https://github.com'
path: artifacts
pattern: release-*
merge-multiple: true
- name: List artifacts permissions:
run: find artifacts -type f | head -50 contents: write
- name: Create Draft Release steps:
uses: softprops/action-gh-release@v2 - name: Download all artifacts
with: uses: actions/download-artifact@v4
draft: true with:
generate_release_notes: true path: artifacts
files: | pattern: release-*
artifacts/**/*.dmg merge-multiple: true
artifacts/**/*.zip
artifacts/**/*.exe - name: List artifacts
artifacts/**/*.AppImage run: find artifacts -type f | head -50
artifacts/**/*.deb
artifacts/**/*.rpm - name: Create Draft Release
env: uses: softprops/action-gh-release@v2
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with:
draft: true
generate_release_notes: true
files: |
artifacts/**/*.dmg
artifacts/**/*.zip
artifacts/**/*.exe
artifacts/**/*.AppImage
artifacts/**/*.deb
artifacts/**/*.rpm
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@ -1,2 +0,0 @@
src/assets
node_modules

View File

@ -1,10 +0,0 @@
{
"semi": true,
"tabWidth": 4,
"printWidth": 150,
"singleQuote": true,
"trailingComma": "es5",
"bracketSpacing": true,
"arrowParens": "avoid",
"endOfLine": "lf"
}

303
README.md
View File

@ -1,60 +1,279 @@
<img width="1299" height="424" alt="cd (1)" src="https://github.com/user-attachments/assets/b25fff4d-043d-4f38-9985-f832ae0d0f6e" /> <div align="center">
<img src="assets/images/logo.png" alt="Mastermind Logo" width="200"/>
## Recall.ai - API for desktop recording
# Mastermind
If youre looking for a hosted desktop recording API, consider checking out [Recall.ai](https://www.recall.ai/product/desktop-recording-sdk/?utm_source=github&utm_medium=sponsorship&utm_campaign=sohzm-cheating-daddy), an API that records Zoom, Google Meet, Microsoft Teams, in-person meetings, and more.
### Your AI Assistant for High-Stakes Conversations
This project is sponsored by Recall.ai.
*Real-time contextual suggestions when you need them most*
[![Release](https://img.shields.io/github/actions/workflow/status/ShiftyX1/Mastermind/release.yml?label=release)](https://github.com/ShiftyX1/Mastermind/actions/workflows/release.yml)
[![License](https://img.shields.io/badge/license-GPL3.0-blue.svg)](LICENSE)
[![Latest Version](https://img.shields.io/github/v/release/ShiftyX1/Mastermind?include_prereleases&label=latest&color=FFFF00)](https://github.com/ShiftyX1/Mastermind/releases)
[![Stable Version](https://img.shields.io/github/v/release/ShiftyX1/Mastermind?color=6666FF)](https://github.com/ShiftyX1/Mastermind/releases)
[![Platform](https://img.shields.io/badge/platform-macOS%20%7C%20Windows-lightgrey.svg)](#requirements)
</div>
--- ---
> [!NOTE] ## What is Mastermind?
> Use latest MacOS and Windows version, older versions have limited support
> [!NOTE] Mastermind is an **AI assistant** for high-stakes conversations. Whether you're in a job interview, closing a deal, or navigating a complex negotiation, Mastermind analyzes what you see and hear in real-time, providing contextual suggestions and talking points to support your responses.
> During testing it wont answer if you ask something, you need to simulate interviewer asking question, which it will answer
A real-time AI assistant that provides contextual help during video calls, interviews, presentations, and meetings using screen capture and audio analysis. Think of it as having an experienced coach reviewing the conversation and offering suggestions, helping you recall relevant information and structure your thoughts more effectively. The AI provides support material—you still need to understand, adapt, and deliver the responses in your own words.
## Features > [!WARNING]
> **AI models can and do make mistakes.** Suggestions may contain errors, outdated information, or inappropriate content. This tool is designed to assist people who already have relevant knowledge and need help organizing their thoughts—not to fake expertise you don't possess. Always verify critical information and use your own judgment.
- **Live AI Assistance**: Real-time help powered by Google Gemini 2.0 Flash Live ### The Hidden Assistant Advantage
- **Screen & Audio Capture**: Analyzes what you see and hear for contextual responses
- **Multiple Profiles**: Interview, Sales Call, Business Meeting, Presentation, Negotiation
- **Transparent Overlay**: Always-on-top window that can be positioned anywhere
- **Click-through Mode**: Make window transparent to clicks when needed
- **Cross-platform**: Works on macOS, Windows, and Linux (kinda, dont use, just for testing rn)
## Setup Mastermind operates discreetly with a transparent overlay that blends into your screen. The system analyzes both visual content and audio in real-time, generating contextual suggestions within seconds. It adapts its suggestions based on your selected scenario—interview, sales, meeting, or presentation. Ghost mode allows you to interact with content behind the overlay without closing it.
1. **Get a Gemini API Key**: Visit [Google AI Studio](https://aistudio.google.com/apikey) **Remember:** This is an assistive tool, not a magic solution. It works best when you have genuine knowledge and need support organizing your thoughts under pressure.
2. **Install Dependencies**: `npm install`
3. **Run the App**: `npm start`
## Usage ## Key Features
1. Enter your Gemini API key in the main window ### Real-Time Multi-Modal Analysis
2. Choose your profile and language in settings Mastermind captures your screen and audio simultaneously, processing both visual content and audio streams to understand conversation context. It analyzes what's being discussed and generates relevant suggestions based on that context. The system supports dual-stream audio capture to distinguish between system audio and your microphone input, though transcription accuracy depends on audio quality, accents, and background noise.
3. Click "Start Session" to begin
4. Position the window using keyboard shortcuts ### Local-First Privacy Option
5. The AI will provide real-time assistance based on your screen and what interview asks Choose between cloud AI providers for maximum performance or run everything locally with Ollama integration and offline Whisper.js transcription. When using local processing, no audio or screen data ever leaves your machine. The local transcription engine uses ONNX Runtime with GPU acceleration support for fast, private speech-to-text conversion.
### Conversation History & Context
Mastermind saves conversation turns during the session, building context as the conversation progresses. You can view session history and export conversations for later review. The AI uses this accumulated context to provide more relevant suggestions as it learns about the discussion topic—though the quality of contextual understanding is limited by the AI model's capabilities and the clarity of the captured audio/screen content.
### Specialized Profiles
Mastermind comes with six pre-configured personas designed for different scenarios:
**Job Interview** — Suggested responses to technical and behavioral questions, STAR method frameworks, and structured talking points based on your background.
**Sales Call** — Objection handling suggestions, closing technique ideas, pricing strategy considerations, and rapport building approaches.
**Business Meeting** — Data-driven talking points, strategic recommendations, and action-oriented communication suggestions.
**Presentation** — Fact-checking support, audience engagement ideas, and recovery suggestions for unexpected situations.
**Negotiation** — Tactical considerations, counter-offer frameworks, and strategic talking points to support your position.
**Exam Assistant** — Information lookup and answer suggestions for exam questions, optimized for quick reference.
### Invisible Design
The transparent overlay stays on top without blocking your view, with keyboard-driven positioning for quick adjustments. You can hide the window instantly with one click if needed, and customize opacity to match your environment perfectly.
### Flexible AI Backend
Mastermind supports multiple AI providers and can work with both cloud and local models:
**Google Gemini** — Fast, cost-effective multimodal processing with excellent vision capabilities. Supports Gemini 2.0 Flash with real-time API for ultra-low latency responses.
**OpenAI** — Industry-leading language understanding with GPT-4 and GPT-4o models.
**Groq** — High-speed inference with competitive pricing and excellent performance.
**Ollama** — Run completely local AI models on your machine for full privacy. No data ever leaves your device.
**Any OpenAI-Compatible API** — Connect to LocalAI, LM Studio, or any custom endpoint that follows the OpenAI API format.
---
## Advanced Features
**Response Modes** — Toggle between Brief mode (1-3 sentences, optimal for quick glances) and Detailed mode (comprehensive explanations with full context) based on your needs during the session.
**Google Search Integration** — Optional real-time web search capability allows the AI to fetch current information. Note that search results may include outdated or incorrect information—always verify critical facts from authoritative sources.
**Custom System Prompts** — Tailor the AI's behavior with custom instructions specific to your industry, role, or situation. Add your resume, company information, or specialized knowledge to improve context relevance.
**Multi-Language Support** — Works in 30+ languages including English, Spanish, German, French, Japanese, Korean, Chinese, Hindi, Arabic, and many more. Auto-detection available for multilingual conversations, though accuracy varies by language and accent.
**Customizable Keyboard Shortcuts** — Every shortcut can be remapped to your preference. Create your own workflow that feels natural to you.
---
## Getting Started
### Installation
#### For Users (Recommended)
Download the latest release for your platform from the [GitHub Releases](https://github.com/ShiftyX1/Mastermind/releases) page:
**macOS:**
1. Download `Mastermind-[version].dmg`
2. Open the DMG file and drag Mastermind to your Applications folder
3. Launch Mastermind from Applications (you may need to allow the app in System Preferences → Security & Privacy on first launch)
**Windows:**
1. Download `Mastermind-[version]-Setup.exe`
2. Run the installer and follow the setup wizard
3. Launch Mastermind from the Start menu or desktop shortcut
#### For Developers
If you want to build from source or contribute to development:
```bash
# Clone the repository
git clone https://github.com/ShiftyX1/Mastermind.git
cd Mastermind
# Install dependencies
pnpm install
# Launch in development mode
pnpm start
# Build distributable packages (DMG for macOS, Setup.exe for Windows)
pnpm run make
# Package without creating installers
pnpm run package
```
### First-Time Setup
**Get Your AI Key:** Start by obtaining an API key from [Google AI Studio](https://aistudio.google.com/apikey) (recommended for beginners), [OpenAI Platform](https://platform.openai.com/api-keys), [Groq Console](https://console.groq.com), or configure a local Ollama instance for complete privacy.
**Configure Your Assistant:** Enter your API key in the main window and select your preferred AI provider and model. Choose your primary use case profile from the six available scenarios. Select your language or use Auto for multilingual support.
**Start Your Session:** Click "Start Session" to activate your hidden assistant. Grant screen recording and audio capture permissions when prompted by your system. Position the overlay window where it's most useful and adjust opacity to blend naturally with your environment.
### Daily Usage
**Starting a Session:** Select the appropriate profile for your scenario—Interview, Sales, Meeting, Presentation, Negotiation, or Exam. Adjust opacity and position to blend naturally with your environment. Choose your audio mode: Speaker Only (system audio), Microphone Only, or Both for dual-stream capture.
**During Your Conversation:** The AI analyzes your screen and audio context in real-time, with suggestions appearing in the overlay. You can type questions directly for clarification and use keyboard shortcuts to reposition or hide the window. Toggle between brief and detailed response modes depending on your needs.
**Important:** Treat AI suggestions as reference material, not verified facts. Quickly scan suggestions, extract useful points, and deliver responses in your own words with your own understanding. Don't read AI responses verbatim—this often sounds unnatural and may include errors.
**Pro Tips:** Position the window in your natural eye-line to avoid obvious glances. Use click-through mode when you need to interact with content behind the overlay. Keep sessions focused on one topic for better context. Enable local transcription when working with sensitive information.
---
## Keyboard Shortcuts ## Keyboard Shortcuts
- **Window Movement**: `Ctrl/Cmd + Arrow Keys` - Move window Master these shortcuts for seamless, discreet operation:
- **Click-through**: `Ctrl/Cmd + M` - Toggle mouse events
- **Close/Back**: `Ctrl/Cmd + \` - Close window or go back
- **Send Message**: `Enter` - Send text to AI
## Audio Capture | Action | Shortcut | Purpose |
|--------|----------|---------|
| **Move Window** | `Ctrl/Cmd + Arrow Keys` | Reposition without using mouse |
| **Toggle Click-Through** | `Ctrl/Cmd + M` | Make window transparent to clicks |
| **Quick Hide** | `Ctrl/Cmd + \` | Instantly hide/show or go back |
| **Send Message** | `Enter` | Send text query to AI |
| **Quick Position** | Custom | Set your favorite window positions |
- **macOS**: [SystemAudioDump](https://github.com/Mohammed-Yasin-Mulla/Sound) for system audio > **Pro Tip**: All shortcuts are fully customizable in settings. Create your own stealth workflow!
- **Windows**: Loopback audio capture
- **Linux**: Microphone input
## Requirements ---
- Electron-compatible OS (macOS, Windows, Linux) ## Audio Capture Technology
- Gemini API key
- Screen recording permissions Mastermind uses advanced audio capture to understand conversations in real-time, with support for both cloud and local transcription.
- Microphone/audio permissions
**macOS** — Leverages [SystemAudioDump](https://github.com/sohzm/systemAudioDump) for crystal-clear system audio capture. Supports three modes: Speaker Only (system audio), Microphone Only (your voice), or Both (simultaneous dual-stream capture).
**Windows** — Professional loopback audio capture for system sounds, with full microphone support and dual-stream capabilities for capturing both sides of the conversation.
**Linux** — Microphone input support. System audio capture is currently in development.
**Local Transcription** — Built-in offline speech-to-text using Whisper.js powered by ONNX Runtime. Your audio is processed locally on your machine without sending data to external services. Supports GPU acceleration on compatible hardware.
---
## Use Cases
### Job Interviews
Get suggested responses and frameworks for technical questions, helping you structure your thoughts using proven methods like STAR. Mastermind can help you recall relevant examples from your background and organize talking points, but you need to adapt and deliver them authentically in your own voice.
### Sales & Client Calls
Access reference material for objection handling and competitive positioning. The system can suggest talking points and strategies by analyzing the conversation context, but closing deals requires genuine understanding of your product and the client's needs—AI suggestions are starting points, not scripts to read verbatim.
### Business Negotiations
Receive strategic considerations and framework suggestions based on the conversation flow. Mastermind can help you structure counter-offers and identify discussion points, but successful negotiation requires reading the room, building rapport, and making judgment calls that AI cannot make for you.
### Presentations & Demos
Get quick fact-checking and audience engagement ideas during your presentation. If questions arise, Mastermind can suggest relevant information, but you should verify accuracy and ensure you genuinely understand what you're presenting—especially important for technical content where deep knowledge is expected.
---
## System Requirements
| Component | Requirement |
|-----------|-------------|
| **Operating System** | macOS 10.15+, Windows 10/11 (latest versions recommended) |
| **Permissions** | Screen recording, audio capture (system audio and/or microphone) |
| **Internet** | Required for cloud AI providers (Gemini, OpenAI, Groq). Optional for local Ollama models |
| **AI Provider** | API key from Gemini, OpenAI, Groq, or local Ollama installation |
| **For Local Transcription** | 4GB+ RAM recommended, GPU acceleration optional but recommended |
**Current Version:** 0.7.9
> [!NOTE]
> **Platform Support**: macOS and Windows are fully supported and tested. Linux support is experimental.

> [!TIP]
> **Testing Mode**: When testing, simulate someone asking you questions. The AI responds to detected questions rather than your own queries.
---
## Known Limitations
**AI Response Quality** — AI models can make mistakes, provide outdated information, or misinterpret context. Always verify critical information and use suggestions as supporting material, not absolute truth. The quality of responses depends heavily on the AI model you choose and the context you provide.
**Not a Replacement for Knowledge** — Mastermind is a tool to help you recall and structure information, not to replace your actual expertise. The most effective use is when you already understand the subject matter and need help articulating or remembering specific details.
**Linux Support** — System audio capture is not yet implemented on Linux. Only microphone input is currently supported.
**Local Transcription Performance** — First-time usage requires downloading the Whisper model files (approximately 150MB). Transcription speed depends on your hardware; GPU acceleration is recommended for optimal performance.
**macOS Permissions** — Screen recording and audio capture require explicit system permissions. You may need to restart the app after granting permissions on first launch.
**Session Context** — The AI maintains context only within the current session. Starting a new session clears previous conversation history (though history can be saved and viewed later).
---
## Privacy & Ethics
**Your Data, Your Control:** All audio and screen capture happens locally on your device. API communications go directly between you and your chosen provider, with no intermediary servers. For complete privacy, you can use local AI models through Ollama without any data leaving your machine.
**Critical Disclaimers:**
- AI models can generate incorrect, biased, or inappropriate content. Always verify important information from authoritative sources.
- This tool provides suggestions, not verified facts. You are responsible for the accuracy of what you say.
- Relying entirely on AI suggestions without understanding the content can backfire—especially in technical or expert conversations where follow-up questions will reveal lack of genuine knowledge.
**Responsible Use:**
Mastermind is designed as a preparation and cognitive support tool—like having notes or a reference guide. It works best when you already have foundational knowledge and need help organizing thoughts or recalling details under pressure.
**Ethical Boundaries:** Always comply with the rules and policies of your specific context. Many situations explicitly prohibit external assistance:
- Academic exams and certification tests typically ban any form of external help
- Some professional interviews and assessments prohibit such tools
- Certain regulated industries have strict rules about information access during calls
- Using AI assistance where prohibited can result in serious consequences, including job loss or legal issues
**Use this tool to support your genuine expertise, not to fake knowledge you don't have.** The best outcomes happen when AI assists someone who understands the subject, not when it replaces actual competence.
---
## Contributing
Based on the excellent work from [Cheating Daddy](https://github.com/sohzm/cheating-daddy).
Contributions are welcome! Please see [AGENTS.md](AGENTS.md) for development guidelines.
---
## License
This project is licensed under the GPL-3.0 License - see the [LICENSE](LICENSE) file for details.
---
<div align="center">
### A tool to support your expertise, not replace it
*Use responsibly. Verify information. Understand what you're saying.*
</div>

BIN
assets/images/logo.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 238 KiB

View File

@ -10,8 +10,6 @@
<true/> <true/>
<key>com.apple.security.cs.disable-library-validation</key> <key>com.apple.security.cs.disable-library-validation</key>
<true/> <true/>
<key>com.apple.security.cs.allow-dyld-environment-variables</key>
<true/>
<key>com.apple.security.device.audio-input</key> <key>com.apple.security.device.audio-input</key>
<true/> <true/>
<key>com.apple.security.device.microphone</key> <key>com.apple.security.device.microphone</key>

View File

@ -1,97 +1,87 @@
const { FusesPlugin } = require('@electron-forge/plugin-fuses'); const { FusesPlugin } = require("@electron-forge/plugin-fuses");
const { FuseV1Options, FuseVersion } = require('@electron/fuses'); const { FuseV1Options, FuseVersion } = require("@electron/fuses");
const path = require('path');
const fs = require('fs');
module.exports = { module.exports = {
packagerConfig: { packagerConfig: {
asar: true, asar: {
extraResource: ['./src/assets/SystemAudioDump'], unpack:
name: 'Cheating Daddy', "**/{onnxruntime-node,onnxruntime-common,@huggingface/transformers,sharp,@img}/**",
icon: 'src/assets/logo',
// Fix executable permissions after packaging
afterCopy: [
(buildPath, electronVersion, platform, arch, callback) => {
if (platform === 'darwin') {
const systemAudioDump = path.join(buildPath, '..', 'Resources', 'SystemAudioDump');
if (fs.existsSync(systemAudioDump)) {
try {
fs.chmodSync(systemAudioDump, 0o755);
console.log('✓ Set executable permissions for SystemAudioDump');
} catch (err) {
console.error('✗ Failed to set permissions:', err.message);
}
} else {
console.warn('SystemAudioDump not found at:', systemAudioDump);
}
}
callback();
},
],
// use `security find-identity -v -p codesigning` to find your identity
// for macos signing
// Use ad-hoc signing with entitlements for local development
osxSign: {
identity: '-', // ad-hoc signing (no Apple Developer account needed)
optionsForFile: (filePath) => {
return {
entitlements: 'entitlements.plist',
};
},
},
// notarize is off - requires Apple Developer account
// osxNotarize: {
// appleId: 'your apple id',
// appleIdPassword: 'app specific password',
// teamId: 'your team id',
// },
}, },
rebuildConfig: {}, extraResource: ["./src/assets/SystemAudioDump"],
makers: [ name: "Mastermind",
{ icon: "src/assets/logo",
name: '@electron-forge/maker-squirrel', // use `security find-identity -v -p codesigning` to find your identity
config: { // for macos signing
name: 'cheating-daddy', // also fuck apple
productName: 'Cheating Daddy', // osxSign: {
shortcutName: 'Cheating Daddy', // identity: '<paste your identity here>',
createDesktopShortcut: true, // optionsForFile: (filePath) => {
createStartMenuShortcut: true, // return {
}, // entitlements: 'entitlements.plist',
// };
// },
// },
// notarize if off cuz i ran this for 6 hours and it still didnt finish
// osxNotarize: {
// appleId: 'your apple id',
// appleIdPassword: 'app specific password',
// teamId: 'your team id',
// },
},
rebuildConfig: {
// Ensure onnxruntime-node is rebuilt against Electron's Node.js headers
// so the native binding matches the ABI used in packaged builds.
onlyModules: ["onnxruntime-node", "sharp"],
},
makers: [
{
name: "@electron-forge/maker-squirrel",
config: {
name: "mastermind",
productName: "Mastermind",
shortcutName: "Mastermind",
createDesktopShortcut: true,
createStartMenuShortcut: true,
},
},
{
name: "@electron-forge/maker-dmg",
platforms: ["darwin"],
config: {
format: "UDZO",
icon: "src/assets/logo.icns",
},
},
{
name: "@reforged/maker-appimage",
platforms: ["linux"],
config: {
options: {
name: "Mastermind",
productName: "Mastermind",
genericName: "AI Assistant",
description: "AI assistant for interviews and learning",
categories: ["Development", "Education"],
icon: "src/assets/logo.png",
}, },
{ },
name: '@electron-forge/maker-dmg', },
platforms: ['darwin'], ],
}, plugins: [
{ {
name: '@reforged/maker-appimage', name: "@electron-forge/plugin-auto-unpack-natives",
platforms: ['linux'], config: {},
config: { },
options: { // Fuses are used to enable/disable various Electron functionality
name: 'Cheating Daddy', // at package time, before code signing the application
productName: 'Cheating Daddy', new FusesPlugin({
genericName: 'AI Assistant', version: FuseVersion.V1,
description: 'AI assistant for interviews and learning', [FuseV1Options.RunAsNode]: false,
categories: ['Development', 'Education'], [FuseV1Options.EnableCookieEncryption]: true,
icon: 'src/assets/logo.png' [FuseV1Options.EnableNodeOptionsEnvironmentVariable]: false,
} [FuseV1Options.EnableNodeCliInspectArguments]: false,
}, [FuseV1Options.EnableEmbeddedAsarIntegrityValidation]: true,
}, [FuseV1Options.OnlyLoadAppFromAsar]: true,
], }),
plugins: [ ],
{
name: '@electron-forge/plugin-auto-unpack-natives',
config: {},
},
// Fuses are used to enable/disable various Electron functionality
// at package time, before code signing the application
new FusesPlugin({
version: FuseVersion.V1,
[FuseV1Options.RunAsNode]: false,
[FuseV1Options.EnableCookieEncryption]: true,
[FuseV1Options.EnableNodeOptionsEnvironmentVariable]: false,
[FuseV1Options.EnableNodeCliInspectArguments]: false,
[FuseV1Options.EnableEmbeddedAsarIntegrityValidation]: true,
[FuseV1Options.OnlyLoadAppFromAsar]: true,
}),
],
}; };

View File

@ -1,46 +1,56 @@
{ {
"name": "cheating-daddy", "name": "mastermind",
"productName": "cheating-daddy", "productName": "Mastermind",
"version": "0.5.3", "version": "0.7.9",
"description": "cheating daddy", "description": "Mastermind AI assistant",
"main": "src/index.js", "main": "src/index.js",
"scripts": { "scripts": {
"start": "electron-forge start", "start": "electron-forge start",
"package": "electron-forge package", "package": "electron-forge package",
"make": "electron-forge make", "make": "electron-forge make",
"publish": "electron-forge publish", "publish": "electron-forge publish",
"lint": "echo \"No linting configured\"" "lint": "echo \"No linting configured\"",
}, "postinstall": "electron-rebuild -f -w onnxruntime-node"
"keywords": [ },
"cheating daddy", "keywords": [
"cheating daddy ai", "mastermind",
"cheating daddy ai assistant", "mastermind ai",
"cheating daddy ai assistant for interviews", "mastermind ai assistant",
"cheating daddy ai assistant for interviews" "mastermind ai assistant for interviews",
], "mastermind ai assistant for interviews"
"author": { ],
"name": "sohzm", "author": {
"email": "sohambharambe9@gmail.com" "name": "ShiftyX1",
}, "email": "lead@pyserve.org"
"license": "GPL-3.0", },
"dependencies": { "license": "GPL-3.0",
"@google/genai": "^1.35.0", "dependencies": {
"electron-squirrel-startup": "^1.0.1", "@google/genai": "^1.41.0",
"openai": "^6.16.0", "@huggingface/transformers": "^3.8.1",
"ws": "^8.18.0" "electron-squirrel-startup": "^1.0.1",
}, "ollama": "^0.6.3",
"devDependencies": { "openai": "^6.22.0",
"@electron-forge/cli": "^7.11.1", "p-retry": "^4.6.2",
"@electron-forge/maker-deb": "^7.11.1", "ws": "^8.19.0"
"@electron-forge/maker-dmg": "^7.11.1", },
"@electron-forge/maker-rpm": "^7.11.1", "devDependencies": {
"@electron-forge/maker-squirrel": "^7.11.1", "@electron-forge/cli": "^7.8.1",
"@electron-forge/maker-zip": "^7.11.1", "@electron-forge/maker-deb": "^7.8.1",
"@electron-forge/plugin-auto-unpack-natives": "^7.11.1", "@electron-forge/maker-dmg": "^7.8.1",
"@electron-forge/plugin-fuses": "^7.11.1", "@electron-forge/maker-rpm": "^7.8.1",
"@electron/fuses": "^2.0.0", "@electron-forge/maker-squirrel": "^7.8.1",
"@electron/osx-sign": "^2.3.0", "@electron-forge/maker-zip": "^7.8.1",
"@reforged/maker-appimage": "^5.1.1", "@electron-forge/plugin-auto-unpack-natives": "^7.8.1",
"electron": "^39.2.7" "@electron-forge/plugin-fuses": "^7.8.1",
"@electron/fuses": "^1.8.0",
"@electron/rebuild": "^3.7.1",
"@reforged/maker-appimage": "^5.0.0",
"electron": "^30.0.5",
"electron-icon-builder": "^2.0.1"
},
"pnpm": {
"overrides": {
"p-retry": "4.6.2"
} }
}
} }

2514
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

11
pnpm-workspace.yaml Normal file
View File

@ -0,0 +1,11 @@
packages:
- '.'
onlyBuiltDependencies:
- electron
- electron-winstaller
- fs-xattr
- macos-alias
- onnxruntime-node
- protobufjs
- sharp

Binary file not shown.

Binary file not shown.

Before

Width:  |  Height:  |  Size: 176 KiB

After

Width:  |  Height:  |  Size: 353 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 28 KiB

After

Width:  |  Height:  |  Size: 190 KiB

View File

@ -3,7 +3,7 @@ import { html, css, LitElement } from '../../assets/lit-core-2.7.4.min.js';
export class AppHeader extends LitElement { export class AppHeader extends LitElement {
static styles = css` static styles = css`
* { * {
font-family: 'Inter', -apple-system, BlinkMacSystemFont, sans-serif; font-family: var(--font);
cursor: default; cursor: default;
user-select: none; user-select: none;
} }
@ -14,14 +14,14 @@ export class AppHeader extends LitElement {
align-items: center; align-items: center;
padding: var(--header-padding); padding: var(--header-padding);
background: var(--header-background); background: var(--header-background);
border-bottom: 1px solid var(--border-color); border-bottom: 1px solid var(--border);
} }
.header-title { .header-title {
flex: 1; flex: 1;
font-size: var(--header-font-size); font-size: var(--header-font-size);
font-weight: 500; font-weight: 500;
color: var(--text-color); color: var(--text-primary);
-webkit-app-region: drag; -webkit-app-region: drag;
} }
@ -39,8 +39,8 @@ export class AppHeader extends LitElement {
.button { .button {
background: transparent; background: transparent;
color: var(--text-color); color: var(--text-primary);
border: 1px solid var(--border-color); border: 1px solid var(--border);
padding: var(--header-button-padding); padding: var(--header-button-padding);
border-radius: 3px; border-radius: 3px;
font-size: var(--header-font-size-small); font-size: var(--header-font-size-small);
@ -73,7 +73,7 @@ export class AppHeader extends LitElement {
.icon-button:hover { .icon-button:hover {
background: var(--hover-background); background: var(--hover-background);
color: var(--text-color); color: var(--text-primary);
} }
:host([isclickthrough]) .button:hover, :host([isclickthrough]) .button:hover,
@ -86,7 +86,7 @@ export class AppHeader extends LitElement {
padding: 2px 6px; padding: 2px 6px;
border-radius: 3px; border-radius: 3px;
font-size: 11px; font-size: 11px;
font-family: 'SF Mono', Monaco, monospace; font-family: var(--font-mono);
} }
.click-through-indicator { .click-through-indicator {
@ -95,7 +95,7 @@ export class AppHeader extends LitElement {
background: var(--key-background); background: var(--key-background);
padding: 2px 6px; padding: 2px 6px;
border-radius: 3px; border-radius: 3px;
font-family: 'SF Mono', Monaco, monospace; font-family: var(--font-mono);
} }
.update-button { .update-button {
@ -120,148 +120,6 @@ export class AppHeader extends LitElement {
.update-button:hover { .update-button:hover {
background: rgba(241, 76, 76, 0.1); background: rgba(241, 76, 76, 0.1);
} }
.status-wrapper {
position: relative;
display: inline-flex;
align-items: center;
}
.status-text {
font-size: var(--header-font-size-small);
color: var(--text-secondary);
max-width: 120px;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
}
.status-text.error {
color: #f14c4c;
}
.status-tooltip {
position: absolute;
top: 100%;
right: 0;
margin-top: 8px;
background: var(--tooltip-bg, #1a1a1a);
color: var(--tooltip-text, #ffffff);
padding: 10px 14px;
border-radius: 6px;
font-size: 12px;
max-width: 300px;
word-wrap: break-word;
white-space: normal;
opacity: 0;
visibility: hidden;
transition: opacity 0.15s ease, visibility 0.15s ease;
pointer-events: none;
box-shadow: 0 4px 12px rgba(0,0,0,0.3);
z-index: 1000;
line-height: 1.4;
}
.status-tooltip::before {
content: '';
position: absolute;
bottom: 100%;
right: 16px;
border: 6px solid transparent;
border-bottom-color: var(--tooltip-bg, #1a1a1a);
}
.status-wrapper:hover .status-tooltip {
opacity: 1;
visibility: visible;
}
.status-tooltip .tooltip-label {
font-size: 10px;
text-transform: uppercase;
opacity: 0.6;
margin-bottom: 4px;
}
.status-tooltip .tooltip-content {
color: #f14c4c;
}
.model-info {
display: flex;
gap: 6px;
align-items: center;
}
.model-badge {
font-size: 10px;
color: var(--text-muted);
background: var(--key-background);
padding: 2px 6px;
border-radius: 3px;
font-family: 'SF Mono', Monaco, monospace;
max-width: 100px;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
}
.model-badge-wrapper {
position: relative;
display: inline-flex;
}
.model-badge-wrapper .model-tooltip {
position: absolute;
top: 100%;
right: 0;
margin-top: 8px;
background: var(--tooltip-bg, #1a1a1a);
color: var(--tooltip-text, #ffffff);
padding: 10px 14px;
border-radius: 6px;
font-size: 12px;
white-space: nowrap;
opacity: 0;
visibility: hidden;
transition: opacity 0.15s ease, visibility 0.15s ease;
pointer-events: none;
box-shadow: 0 4px 12px rgba(0,0,0,0.3);
z-index: 1000;
}
.model-badge-wrapper .model-tooltip::before {
content: '';
position: absolute;
bottom: 100%;
right: 16px;
border: 6px solid transparent;
border-bottom-color: var(--tooltip-bg, #1a1a1a);
}
.model-badge-wrapper:hover .model-tooltip {
opacity: 1;
visibility: visible;
}
.model-tooltip-row {
display: flex;
justify-content: space-between;
gap: 16px;
margin-bottom: 4px;
}
.model-tooltip-row:last-child {
margin-bottom: 0;
}
.model-tooltip-label {
opacity: 0.7;
}
.model-tooltip-value {
font-family: 'SF Mono', Monaco, monospace;
}
`; `;
static properties = { static properties = {
@ -276,8 +134,6 @@ export class AppHeader extends LitElement {
onHideToggleClick: { type: Function }, onHideToggleClick: { type: Function },
isClickThrough: { type: Boolean, reflect: true }, isClickThrough: { type: Boolean, reflect: true },
updateAvailable: { type: Boolean }, updateAvailable: { type: Boolean },
aiProvider: { type: String },
modelInfo: { type: Object },
}; };
constructor() { constructor() {
@ -294,8 +150,6 @@ export class AppHeader extends LitElement {
this.isClickThrough = false; this.isClickThrough = false;
this.updateAvailable = false; this.updateAvailable = false;
this._timerInterval = null; this._timerInterval = null;
this.aiProvider = 'gemini';
this.modelInfo = { model: '', visionModel: '', whisperModel: '' };
} }
connectedCallback() { connectedCallback() {
@ -388,15 +242,15 @@ export class AppHeader extends LitElement {
getViewTitle() { getViewTitle() {
const titles = { const titles = {
onboarding: 'Welcome to Cheating Daddy', onboarding: 'Welcome to Mastermind',
main: 'Cheating Daddy', main: 'Mastermind',
customize: 'Customize', customize: 'Customize',
help: 'Help & Shortcuts', help: 'Help & Shortcuts',
history: 'Conversation History', history: 'Conversation History',
advanced: 'Advanced Tools', advanced: 'Advanced Tools',
assistant: 'Cheating Daddy', assistant: 'Mastermind',
}; };
return titles[this.currentView] || 'Cheating Daddy'; return titles[this.currentView] || 'Mastermind';
} }
getElapsedTime() { getElapsedTime() {
@ -417,49 +271,8 @@ export class AppHeader extends LitElement {
return navigationViews.includes(this.currentView); return navigationViews.includes(this.currentView);
} }
getProviderDisplayName() {
const names = {
'gemini': 'Gemini',
'openai-realtime': 'OpenAI Realtime',
'openai-sdk': 'OpenAI SDK'
};
return names[this.aiProvider] || this.aiProvider;
}
renderModelInfo() {
// Only show model info for OpenAI SDK provider
if (this.aiProvider !== 'openai-sdk' || !this.modelInfo) {
return '';
}
const { model, visionModel, whisperModel } = this.modelInfo;
// Show a compact badge with tooltip for model details
return html`
<div class="model-badge-wrapper">
<span class="model-badge" title="Models">${model || 'gpt-4o'}</span>
<div class="model-tooltip">
<div class="model-tooltip-row">
<span class="model-tooltip-label">Text</span>
<span class="model-tooltip-value">${model || 'gpt-4o'}</span>
</div>
<div class="model-tooltip-row">
<span class="model-tooltip-label">Vision</span>
<span class="model-tooltip-value">${visionModel || 'gpt-4o'}</span>
</div>
<div class="model-tooltip-row">
<span class="model-tooltip-label">Speech</span>
<span class="model-tooltip-value">${whisperModel || 'whisper-1'}</span>
</div>
</div>
</div>
`;
}
render() { render() {
const elapsedTime = this.getElapsedTime(); const elapsedTime = this.getElapsedTime();
const isError = this.statusText && (this.statusText.toLowerCase().includes('error') || this.statusText.toLowerCase().includes('failed'));
const shortStatus = isError ? 'Error' : this.statusText;
return html` return html`
<div class="header"> <div class="header">
@ -467,17 +280,8 @@ export class AppHeader extends LitElement {
<div class="header-actions"> <div class="header-actions">
${this.currentView === 'assistant' ${this.currentView === 'assistant'
? html` ? html`
${this.renderModelInfo()}
<span>${elapsedTime}</span> <span>${elapsedTime}</span>
<div class="status-wrapper"> <span>${this.statusText}</span>
<span class="status-text ${isError ? 'error' : ''}">${shortStatus}</span>
${isError ? html`
<div class="status-tooltip">
<div class="tooltip-label">Error Details</div>
<div class="tooltip-content">${this.statusText}</div>
</div>
` : ''}
</div>
${this.isClickThrough ? html`<span class="click-through-indicator">click-through</span>` : ''} ${this.isClickThrough ? html`<span class="click-through-indicator">click-through</span>` : ''}
` `
: ''} : ''}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,143 @@
import { html, css, LitElement } from '../../assets/lit-core-2.7.4.min.js';
import { unifiedPageStyles } from './sharedPageStyles.js';
/**
 * AI context settings page.
 *
 * Lets the user pick the provider regime (BYOK API keys vs local Ollama),
 * choose a conversation profile, and enter free-form custom instructions
 * that are sent as context at session start. All choices are persisted
 * through the `cheatingDaddy.storage` preference bridge.
 */
export class AICustomizeView extends LitElement {
    static styles = [
        unifiedPageStyles,
        css`
            .unified-page {
                height: 100%;
            }
            .unified-wrap {
                height: 100%;
            }
            section.surface {
                flex: 1;
                display: flex;
                flex-direction: column;
            }
            .form-grid {
                flex: 1;
                display: flex;
                flex-direction: column;
            }
            .form-group.vertical {
                flex: 1;
                display: flex;
                flex-direction: column;
            }
            textarea.control {
                flex: 1;
                resize: none;
                overflow-y: auto;
                min-height: 0;
            }
        `,
    ];

    static properties = {
        selectedProfile: { type: String },
        onProfileChange: { type: Function },
        _context: { state: true },
        _providerMode: { state: true },
    };

    constructor() {
        super();
        this.selectedProfile = 'interview';
        this.onProfileChange = () => {};
        this._context = '';
        this._providerMode = 'byok';
        // Fire-and-forget: errors are handled (and logged) inside the loader,
        // so this cannot produce an unhandled rejection.
        this._loadFromStorage();
    }

    /**
     * Load persisted preferences (custom prompt and provider mode).
     * Best-effort: on failure the defaults set in the constructor remain.
     */
    async _loadFromStorage() {
        try {
            const prefs = await cheatingDaddy.storage.getPreferences();
            this._context = prefs.customPrompt || '';
            this._providerMode = prefs.providerMode || 'byok';
            this.requestUpdate();
        } catch (error) {
            console.error('Error loading AI customize storage:', error);
        }
    }

    /** Forward profile <select> changes to the host-supplied callback. */
    _handleProfileChange(e) {
        this.onProfileChange(e.target.value);
    }

    /**
     * Persist the selected provider mode ('byok' or 'local').
     * Storage failures are logged instead of thrown so a failed write
     * never surfaces as an unhandled promise rejection from the event handler.
     * @param {Event} e - The change event from the regime <select>.
     */
    async _handleProviderModeChange(e) {
        this._providerMode = e.target.value;
        try {
            await cheatingDaddy.storage.updatePreference('providerMode', this._providerMode);
        } catch (error) {
            console.error('Error saving provider mode:', error);
        }
        this.requestUpdate();
    }

    /**
     * Persist the custom instructions text. Called on every input event;
     * failures are logged rather than thrown so typing is never interrupted.
     * @param {string} val - Current textarea contents.
     */
    async _saveContext(val) {
        this._context = val;
        try {
            await cheatingDaddy.storage.updatePreference('customPrompt', val);
        } catch (error) {
            console.error('Error saving custom prompt:', error);
        }
    }

    /**
     * Map a profile key to its human-readable label.
     * Unknown keys fall back to the raw key so the UI never shows blank text.
     * @param {string} profile
     * @returns {string}
     */
    _getProfileName(profile) {
        const names = {
            interview: 'Job Interview',
            sales: 'Sales Call',
            meeting: 'Business Meeting',
            presentation: 'Presentation',
            negotiation: 'Negotiation',
            exam: 'Exam Assistant',
        };
        return names[profile] || profile;
    }

    render() {
        const profiles = [
            { value: 'interview', label: 'Job Interview' },
            { value: 'sales', label: 'Sales Call' },
            { value: 'meeting', label: 'Business Meeting' },
            { value: 'presentation', label: 'Presentation' },
            { value: 'negotiation', label: 'Negotiation' },
            { value: 'exam', label: 'Exam Assistant' },
        ];

        return html`
            <div class="unified-page">
                <div class="unified-wrap">
                    <div>
                        <div class="page-title">AI Context</div>
                    </div>
                    <section class="surface">
                        <div class="form-grid">
                            <div class="form-group">
                                <label class="form-label">Regime</label>
                                <select class="control" .value=${this._providerMode} @change=${this._handleProviderModeChange}>
                                    <option value="byok">BYOK (API Keys)</option>
                                    <option value="local">Local AI (Ollama)</option>
                                </select>
                            </div>
                            <div class="form-group">
                                <label class="form-label">Profile</label>
                                <select class="control" .value=${this.selectedProfile} @change=${this._handleProfileChange}>
                                    ${profiles.map(profile => html`<option value=${profile.value}>${profile.label}</option>`)}
                                </select>
                            </div>
                            <div class="form-group vertical">
                                <label class="form-label">Custom Instructions</label>
                                <textarea
                                    class="control"
                                    placeholder="Resume details, role requirements, constraints..."
                                    .value=${this._context}
                                    @input=${e => this._saveContext(e.target.value)}
                                ></textarea>
                                <div class="form-help">Sent as context at session start. Keep it short.</div>
                            </div>
                        </div>
                    </section>
                </div>
            </div>
        `;
    }
}

customElements.define('ai-customize-view', AICustomizeView);

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,237 @@
import { html, css, LitElement } from '../../assets/lit-core-2.7.4.min.js';
import { unifiedPageStyles } from './sharedPageStyles.js';
/**
 * Feedback form page.
 *
 * Collects free-form feedback (plus an optional reply email) and POSTs it
 * to the feedback API. Optionally appends the OS name and app version.
 * Submission state and the result message are tracked as reactive state.
 */
export class FeedbackView extends LitElement {
    static styles = [
        unifiedPageStyles,
        css`
            .feedback-form {
                display: flex;
                flex-direction: column;
                gap: var(--space-sm);
            }
            .feedback-input {
                width: 100%;
                padding: var(--space-sm) var(--space-md);
                border: 1px solid var(--border);
                border-radius: var(--radius-sm);
                background: var(--bg-elevated);
                color: var(--text-primary);
                font-size: var(--font-size-sm);
                font-family: var(--font);
            }
            .feedback-input:focus {
                outline: none;
                border-color: var(--accent);
            }
            .feedback-input::placeholder {
                color: var(--text-muted);
            }
            textarea.feedback-input {
                min-height: 140px;
                resize: vertical;
                line-height: 1.45;
            }
            input.feedback-input {
                max-width: 260px;
            }
            .feedback-row {
                display: flex;
                align-items: center;
                gap: var(--space-sm);
            }
            .feedback-submit {
                padding: var(--space-sm) var(--space-md);
                border: none;
                border-radius: var(--radius-sm);
                background: var(--accent);
                color: var(--btn-primary-text, #fff);
                font-size: var(--font-size-sm);
                font-weight: var(--font-weight-medium);
                cursor: pointer;
                transition: opacity var(--transition);
                white-space: nowrap;
            }
            .feedback-submit:hover {
                opacity: 0.85;
            }
            .feedback-submit:disabled {
                opacity: 0.5;
                cursor: not-allowed;
            }
            .feedback-status {
                font-size: var(--font-size-xs);
                color: var(--text-muted);
            }
            .feedback-status.success {
                color: var(--success);
            }
            .feedback-status.error {
                color: var(--danger);
            }
            .attach-info {
                display: flex;
                align-items: center;
                gap: var(--space-xs);
                font-size: var(--font-size-xs);
                color: var(--text-muted);
                cursor: pointer;
                user-select: none;
            }
            .attach-info input[type="checkbox"] {
                cursor: pointer;
                accent-color: var(--accent);
            }
        `,
    ];

    static properties = {
        _feedbackText: { state: true },
        _feedbackEmail: { state: true },
        _feedbackStatus: { state: true },
        _feedbackSending: { state: true },
        _attachInfo: { state: true },
        _version: { state: true },
    };

    constructor() {
        super();
        this._feedbackText = '';
        this._feedbackEmail = '';
        // Status is encoded as 'kind:message' where kind is 'success' or 'error'.
        this._feedbackStatus = '';
        this._feedbackSending = false;
        this._attachInfo = true;
        this._version = '';
        // Fire-and-forget; errors are handled (and logged) inside the loader.
        this._loadVersion();
    }

    /**
     * Fetch the app version via the preload bridge.
     * Best-effort: on failure the version simply stays empty.
     */
    async _loadVersion() {
        try {
            this._version = await cheatingDaddy.getVersion();
            this.requestUpdate();
        } catch (e) {
            console.warn('Could not load app version:', e);
        }
    }

    /**
     * Best-effort OS name for the diagnostic footer.
     * @returns {string} 'macOS' | 'Windows' | 'Linux' | raw platform string.
     */
    _getOS() {
        // navigator.platform is deprecated; prefer userAgentData when available.
        const p = navigator.userAgentData?.platform || navigator.platform || '';
        if (p.includes('Mac')) return 'macOS';
        if (p.includes('Win')) return 'Windows';
        if (p.includes('Linux')) return 'Linux';
        return p;
    }

    /**
     * Validate and POST the feedback. Enforces the 2000-character limit on
     * the final payload (including the optional OS/version footer), aborts
     * the request after 15 seconds so the UI cannot hang in "Sending...",
     * and maps HTTP outcomes to user-facing status messages.
     */
    async _submitFeedback() {
        const text = this._feedbackText.trim();
        if (!text || this._feedbackSending) return;

        let content = text;
        if (this._attachInfo) {
            content += `\n\nsent from ${this._getOS()} version ${this._version}`;
        }

        if (content.length > 2000) {
            this._feedbackStatus = 'error:Max 2000 characters';
            this.requestUpdate();
            return;
        }

        this._feedbackSending = true;
        this._feedbackStatus = '';
        this.requestUpdate();

        // Abort the request if the server does not respond in time, so the
        // submit button does not stay disabled indefinitely.
        const controller = new AbortController();
        const timeoutId = setTimeout(() => controller.abort(), 15000);

        try {
            const body = { feedback: content };
            if (this._feedbackEmail.trim()) {
                body.email = this._feedbackEmail.trim();
            }

            const res = await fetch('https://api.cheatingdaddy.com/api/feedback', {
                method: 'POST',
                headers: { 'Content-Type': 'application/json' },
                body: JSON.stringify(body),
                signal: controller.signal,
            });

            if (res.ok) {
                this._feedbackText = '';
                this._feedbackEmail = '';
                this._feedbackStatus = 'success:Feedback sent, thank you!';
            } else if (res.status === 429) {
                this._feedbackStatus = 'error:Please wait a few minutes before sending again';
            } else {
                this._feedbackStatus = 'error:Failed to send feedback';
            }
        } catch (e) {
            // Covers network failures and the abort timeout alike.
            this._feedbackStatus = 'error:Could not connect to server';
        } finally {
            clearTimeout(timeoutId);
            this._feedbackSending = false;
            this.requestUpdate();
        }
    }

    render() {
        return html`
            <div class="unified-page">
                <div class="unified-wrap">
                    <div class="page-title">Feedback</div>
                    <section class="surface">
                        <div class="feedback-form">
                            <textarea
                                class="feedback-input"
                                placeholder="Bug reports, feature requests, anything..."
                                .value=${this._feedbackText}
                                @input=${e => { this._feedbackText = e.target.value; }}
                                maxlength="2000"
                            ></textarea>
                            <input
                                class="feedback-input"
                                type="email"
                                placeholder="Email (optional)"
                                .value=${this._feedbackEmail}
                                @input=${e => { this._feedbackEmail = e.target.value; }}
                            />
                            <label class="attach-info">
                                <input
                                    type="checkbox"
                                    .checked=${this._attachInfo}
                                    @change=${e => { this._attachInfo = e.target.checked; }}
                                />
                                Attach OS and app version
                            </label>
                            <div class="feedback-row">
                                <button
                                    class="feedback-submit"
                                    @click=${() => this._submitFeedback()}
                                    ?disabled=${!this._feedbackText.trim() || this._feedbackSending}
                                >
                                    ${this._feedbackSending ? 'Sending...' : 'Send Feedback'}
                                </button>
                                ${this._feedbackStatus ? html`
                                    <span class="feedback-status ${this._feedbackStatus.split(':')[0]}">
                                        ${this._feedbackStatus.split(':').slice(1).join(':')}
                                    </span>
                                ` : ''}
                            </div>
                        </div>
                    </section>
                </div>
            </div>
        `;
    }
}

customElements.define('feedback-view', FeedbackView);

View File

@ -1,229 +1,95 @@
import { html, css, LitElement } from '../../assets/lit-core-2.7.4.min.js'; import { html, css, LitElement } from '../../assets/lit-core-2.7.4.min.js';
import { resizeLayout } from '../../utils/windowResize.js'; import { unifiedPageStyles } from './sharedPageStyles.js';
export class HelpView extends LitElement { export class HelpView extends LitElement {
static styles = css` static styles = [
* { unifiedPageStyles,
font-family: 'Inter', -apple-system, BlinkMacSystemFont, sans-serif; css`
cursor: default; .shortcut-grid {
user-select: none; display: grid;
} grid-template-columns: 1fr 1fr;
gap: var(--space-sm);
}
:host { .shortcut-row {
display: block; display: flex;
padding: 0; align-items: center;
} justify-content: space-between;
gap: var(--space-sm);
padding: var(--space-sm);
border: 1px solid var(--border);
border-radius: var(--radius-sm);
background: var(--bg-elevated);
}
.help-container { .shortcut-label {
display: flex; color: var(--text-secondary);
flex-direction: column; font-size: var(--font-size-xs);
} }
.option-group { .shortcut-keys {
padding: 16px 12px; display: inline-flex;
border-bottom: 1px solid var(--border-color); gap: 4px;
} flex-wrap: wrap;
justify-content: flex-end;
}
.option-group:last-child { .key {
border-bottom: none; border: 1px solid var(--border);
} border-radius: var(--radius-sm);
padding: 2px 6px;
font-size: var(--font-size-xs);
color: var(--text-primary);
background: var(--bg-surface);
font-family: var(--font-mono);
}
.option-label { .list {
font-size: 11px; display: grid;
font-weight: 600; gap: var(--space-sm);
color: var(--text-muted); }
text-transform: uppercase;
letter-spacing: 0.5px;
margin-bottom: 12px;
}
.description { .list-item {
color: var(--text-secondary); padding: var(--space-sm);
font-size: 12px; border: 1px solid var(--border);
line-height: 1.4; border-radius: var(--radius-sm);
user-select: text; color: var(--text-secondary);
cursor: text; font-size: var(--font-size-sm);
} line-height: 1.45;
background: var(--bg-elevated);
}
.description strong { .link-row {
color: var(--text-color); display: flex;
font-weight: 500; flex-wrap: wrap;
} gap: var(--space-sm);
}
.link { .link-button {
color: var(--text-color); border: 1px solid var(--border);
text-decoration: underline; border-radius: var(--radius-sm);
text-underline-offset: 2px; padding: 8px 10px;
cursor: pointer; background: var(--bg-elevated);
} color: var(--text-primary);
font-size: var(--font-size-sm);
cursor: pointer;
transition: border-color var(--transition), color var(--transition), background var(--transition);
}
.key { .link-button:hover {
background: var(--bg-tertiary); color: var(--text-primary);
color: var(--text-color); border-color: var(--accent);
border: 1px solid var(--border-color); background: rgba(63, 125, 229, 0.14);
padding: 2px 6px; }
border-radius: 3px;
font-size: 10px;
font-family: 'SF Mono', Monaco, monospace;
font-weight: 500;
margin: 0 1px;
white-space: nowrap;
}
.keyboard-section { @media (max-width: 820px) {
display: grid; .shortcut-grid {
grid-template-columns: repeat(auto-fit, minmax(220px, 1fr)); grid-template-columns: 1fr;
gap: 12px; }
margin-top: 8px; }
}
.keyboard-group { `,
padding: 10px 0; ];
border-bottom: 1px solid var(--border-color);
}
.keyboard-group:last-child {
border-bottom: none;
}
.keyboard-group-title {
font-weight: 600;
font-size: 12px;
color: var(--text-color);
margin-bottom: 8px;
}
.shortcut-item {
display: flex;
justify-content: space-between;
align-items: center;
padding: 4px 0;
font-size: 11px;
}
.shortcut-description {
color: var(--text-secondary);
}
.shortcut-keys {
display: flex;
gap: 2px;
}
.profiles-grid {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(180px, 1fr));
gap: 8px;
margin-top: 8px;
}
.profile-item {
padding: 8px 0;
border-bottom: 1px solid var(--border-color);
}
.profile-item:last-child {
border-bottom: none;
}
.profile-name {
font-weight: 500;
font-size: 12px;
color: var(--text-color);
margin-bottom: 2px;
}
.profile-description {
font-size: 11px;
color: var(--text-muted);
line-height: 1.3;
}
.community-links {
display: flex;
gap: 8px;
flex-wrap: wrap;
}
.community-link {
display: flex;
align-items: center;
gap: 6px;
padding: 6px 10px;
background: transparent;
border: 1px solid var(--border-color);
border-radius: 3px;
color: var(--text-color);
font-size: 11px;
font-weight: 500;
transition: background 0.1s ease;
cursor: pointer;
}
.community-link:hover {
background: var(--hover-background);
}
.community-link svg {
width: 14px;
height: 14px;
flex-shrink: 0;
}
.open-logs-btn {
display: inline-flex;
align-items: center;
gap: 6px;
padding: 8px 14px;
background: var(--bg-tertiary);
border: 1px solid var(--border-color);
border-radius: 4px;
color: var(--text-color);
font-size: 12px;
font-weight: 500;
cursor: pointer;
transition: background 0.15s ease;
}
.open-logs-btn:hover {
background: var(--hover-background);
}
.usage-steps {
counter-reset: step-counter;
}
.usage-step {
counter-increment: step-counter;
position: relative;
padding-left: 24px;
margin-bottom: 8px;
font-size: 11px;
line-height: 1.4;
color: var(--text-secondary);
}
.usage-step::before {
content: counter(step-counter);
position: absolute;
left: 0;
top: 0;
width: 16px;
height: 16px;
background: var(--bg-tertiary);
color: var(--text-color);
border-radius: 3px;
display: flex;
align-items: center;
justify-content: center;
font-size: 10px;
font-weight: 600;
}
.usage-step strong {
color: var(--text-color);
}
`;
static properties = { static properties = {
onExternalLinkClick: { type: Function }, onExternalLinkClick: { type: Function },
@ -249,12 +115,6 @@ export class HelpView extends LitElement {
} }
} }
connectedCallback() {
super.connectedCallback();
// Resize window for this view
resizeLayout();
}
getDefaultKeybinds() { getDefaultKeybinds() {
const isMac = cheatingDaddy.isMacOS || navigator.platform.includes('Mac'); const isMac = cheatingDaddy.isMacOS || navigator.platform.includes('Mac');
return { return {
@ -272,222 +132,58 @@ export class HelpView extends LitElement {
}; };
} }
formatKeybind(keybind) { _formatKeybind(keybind) {
return keybind.split('+').map(key => html`<span class="key">${key}</span>`); return keybind.split('+').map(key => html`<span class="key">${key}</span>`);
} }
handleExternalLinkClick(url) { _open(url) {
this.onExternalLinkClick(url); this.onExternalLinkClick(url);
} }
render() { render() {
const isMacOS = cheatingDaddy.isMacOS || false; const shortcutRows = [
const isLinux = cheatingDaddy.isLinux || false; ['Move Window Up', this.keybinds.moveUp],
['Move Window Down', this.keybinds.moveDown],
['Move Window Left', this.keybinds.moveLeft],
['Move Window Right', this.keybinds.moveRight],
['Toggle Visibility', this.keybinds.toggleVisibility],
['Toggle Click-through', this.keybinds.toggleClickThrough],
['Ask Next Step', this.keybinds.nextStep],
['Previous Response', this.keybinds.previousResponse],
['Next Response', this.keybinds.nextResponse],
['Scroll Response Up', this.keybinds.scrollUp],
['Scroll Response Down', this.keybinds.scrollDown],
];
return html` return html`
<div class="help-container"> <div class="unified-page">
<div class="option-group"> <div class="unified-wrap">
<div class="option-label"> <div class="page-title">Help</div>
<span>Community & Support</span>
</div>
<div class="community-links">
<div class="community-link" @click=${() => this.handleExternalLinkClick('https://cheatingdaddy.com')}>
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
<path d="M14 11.9976C14 9.5059 11.683 7 8.85714 7C8.52241 7 7.41904 7.00001 7.14286 7.00001C4.30254 7.00001 2 9.23752 2 11.9976C2 14.376 3.70973 16.3664 6 16.8714C6.36756 16.9525 6.75006 16.9952 7.14286 16.9952"></path>
<path d="M10 11.9976C10 14.4893 12.317 16.9952 15.1429 16.9952C15.4776 16.9952 16.581 16.9952 16.8571 16.9952C19.6975 16.9952 22 14.7577 22 11.9976C22 9.6192 20.2903 7.62884 18 7.12383C17.6324 7.04278 17.2499 6.99999 16.8571 6.99999"></path>
</svg>
Website
</div>
<div class="community-link" @click=${() => this.handleExternalLinkClick('https://github.com/sohzm/cheating-daddy')}>
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
<path d="M16 22.0268V19.1568C16.0375 18.68 15.9731 18.2006 15.811 17.7506C15.6489 17.3006 15.3929 16.8902 15.06 16.5468C18.2 16.1968 21.5 15.0068 21.5 9.54679C21.4997 8.15062 20.9627 6.80799 20 5.79679C20.4558 4.5753 20.4236 3.22514 19.91 2.02679C19.91 2.02679 18.73 1.67679 16 3.50679C13.708 2.88561 11.292 2.88561 8.99999 3.50679C6.26999 1.67679 5.08999 2.02679 5.08999 2.02679C4.57636 3.22514 4.54413 4.5753 4.99999 5.79679C4.03011 6.81549 3.49251 8.17026 3.49999 9.57679C3.49999 14.9968 6.79998 16.1868 9.93998 16.5768C9.61098 16.9168 9.35725 17.3222 9.19529 17.7667C9.03334 18.2112 8.96679 18.6849 8.99999 19.1568V22.0268"></path>
<path d="M9 20.0267C6 20.9999 3.5 20.0267 2 17.0267"></path>
</svg>
GitHub
</div>
<div class="community-link" @click=${() => this.handleExternalLinkClick('https://discord.gg/GCBdubnXfJ')}>
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
<path d="M5.5 16C10.5 18.5 13.5 18.5 18.5 16"></path>
<path d="M15.5 17.5L16.5 19.5C16.5 19.5 20.6713 18.1717 22 16C22 15 22.5301 7.85339 19 5.5C17.5 4.5 15 4 15 4L14 6H12"></path>
<path d="M8.52832 17.5L7.52832 19.5C7.52832 19.5 3.35699 18.1717 2.02832 16C2.02832 15 1.49823 7.85339 5.02832 5.5C6.52832 4.5 9.02832 4 9.02832 4L10.0283 6H12.0283"></path>
<path d="M8.5 14C7.67157 14 7 13.1046 7 12C7 10.8954 7.67157 10 8.5 10C9.32843 10 10 10.8954 10 12C10 13.1046 9.32843 14 8.5 14Z"></path>
<path d="M15.5 14C14.6716 14 14 13.1046 14 12C14 10.8954 14.6716 10 15.5 10C16.3284 10 17 10.8954 17 12C17 13.1046 16.3284 14 15.5 14Z"></path>
</svg>
Discord
</div>
</div>
</div>
<div class="option-group"> <section class="surface">
<div class="option-label"> <div class="surface-title">Support</div>
<span>Keyboard Shortcuts</span> <div class="link-row">
</div> <button class="link-button" @click=${() => this._open('https://cheatingdaddy.com')}>Website</button>
<div class="keyboard-section"> <button class="link-button" @click=${() => this._open('https://github.com/sohzm/cheating-daddy')}>GitHub</button>
<div class="keyboard-group"> <button class="link-button" @click=${() => this._open('https://discord.gg/GCBdubnXfJ')}>Discord</button>
<div class="keyboard-group-title">Window Movement</div>
<div class="shortcut-item">
<span class="shortcut-description">Move window up</span>
<div class="shortcut-keys">${this.formatKeybind(this.keybinds.moveUp)}</div>
</div>
<div class="shortcut-item">
<span class="shortcut-description">Move window down</span>
<div class="shortcut-keys">${this.formatKeybind(this.keybinds.moveDown)}</div>
</div>
<div class="shortcut-item">
<span class="shortcut-description">Move window left</span>
<div class="shortcut-keys">${this.formatKeybind(this.keybinds.moveLeft)}</div>
</div>
<div class="shortcut-item">
<span class="shortcut-description">Move window right</span>
<div class="shortcut-keys">${this.formatKeybind(this.keybinds.moveRight)}</div>
</div>
</div> </div>
</section>
<div class="keyboard-group"> <section class="surface">
<div class="keyboard-group-title">Window Control</div> <div class="surface-title">Keyboard Shortcuts</div>
<div class="shortcut-item"> <div class="shortcut-grid">
<span class="shortcut-description">Toggle click-through mode</span> ${shortcutRows.map(([label, keys]) => html`
<div class="shortcut-keys">${this.formatKeybind(this.keybinds.toggleClickThrough)}</div> <div class="shortcut-row">
</div> <span class="shortcut-label">${label}</span>
<div class="shortcut-item"> <span class="shortcut-keys">${this._formatKeybind(keys)}</span>
<span class="shortcut-description">Toggle window visibility</span> </div>
<div class="shortcut-keys">${this.formatKeybind(this.keybinds.toggleVisibility)}</div> `)}
</div>
</div> </div>
</section>
<div class="keyboard-group">
<div class="keyboard-group-title">AI Actions</div>
<div class="shortcut-item">
<span class="shortcut-description">Take screenshot and ask for next step</span>
<div class="shortcut-keys">${this.formatKeybind(this.keybinds.nextStep)}</div>
</div>
</div>
<div class="keyboard-group">
<div class="keyboard-group-title">Response Navigation</div>
<div class="shortcut-item">
<span class="shortcut-description">Previous response</span>
<div class="shortcut-keys">${this.formatKeybind(this.keybinds.previousResponse)}</div>
</div>
<div class="shortcut-item">
<span class="shortcut-description">Next response</span>
<div class="shortcut-keys">${this.formatKeybind(this.keybinds.nextResponse)}</div>
</div>
<div class="shortcut-item">
<span class="shortcut-description">Scroll response up</span>
<div class="shortcut-keys">${this.formatKeybind(this.keybinds.scrollUp)}</div>
</div>
<div class="shortcut-item">
<span class="shortcut-description">Scroll response down</span>
<div class="shortcut-keys">${this.formatKeybind(this.keybinds.scrollDown)}</div>
</div>
</div>
<div class="keyboard-group">
<div class="keyboard-group-title">Text Input</div>
<div class="shortcut-item">
<span class="shortcut-description">Send message to AI</span>
<div class="shortcut-keys"><span class="key">Enter</span></div>
</div>
<div class="shortcut-item">
<span class="shortcut-description">New line in text input</span>
<div class="shortcut-keys"><span class="key">Shift</span><span class="key">Enter</span></div>
</div>
</div>
</div>
<div class="description" style="margin-top: 12px; text-align: center;">
You can customize these shortcuts in Settings.
</div>
</div>
<div class="option-group">
<div class="option-label">
<span>How to Use</span>
</div>
<div class="usage-steps">
<div class="usage-step"><strong>Start a Session:</strong> Enter your Gemini API key and click "Start Session"</div>
<div class="usage-step"><strong>Customize:</strong> Choose your profile and language in the settings</div>
<div class="usage-step">
<strong>Position Window:</strong> Use keyboard shortcuts to move the window to your desired location
</div>
<div class="usage-step">
<strong>Click-through Mode:</strong> Use ${this.formatKeybind(this.keybinds.toggleClickThrough)} to make the window
click-through
</div>
<div class="usage-step"><strong>Get AI Help:</strong> The AI will analyze your screen and audio to provide assistance</div>
<div class="usage-step"><strong>Text Messages:</strong> Type questions or requests to the AI using the text input</div>
<div class="usage-step">
<strong>Navigate Responses:</strong> Use ${this.formatKeybind(this.keybinds.previousResponse)} and
${this.formatKeybind(this.keybinds.nextResponse)} to browse through AI responses
</div>
</div>
</div>
<div class="option-group">
<div class="option-label">
<span>Supported Profiles</span>
</div>
<div class="profiles-grid">
<div class="profile-item">
<div class="profile-name">Job Interview</div>
<div class="profile-description">Get help with interview questions and responses</div>
</div>
<div class="profile-item">
<div class="profile-name">Sales Call</div>
<div class="profile-description">Assistance with sales conversations and objection handling</div>
</div>
<div class="profile-item">
<div class="profile-name">Business Meeting</div>
<div class="profile-description">Support for professional meetings and discussions</div>
</div>
<div class="profile-item">
<div class="profile-name">Presentation</div>
<div class="profile-description">Help with presentations and public speaking</div>
</div>
<div class="profile-item">
<div class="profile-name">Negotiation</div>
<div class="profile-description">Guidance for business negotiations and deals</div>
</div>
<div class="profile-item">
<div class="profile-name">Exam Assistant</div>
<div class="profile-description">Academic assistance for test-taking and exam questions</div>
</div>
</div>
</div>
<div class="option-group">
<div class="option-label">
<span>Audio Input</span>
</div>
<div class="description">The AI listens to conversations and provides contextual assistance based on what it hears.</div>
</div>
<div class="option-group">
<div class="option-label">
<span>Troubleshooting</span>
</div>
<div class="description" style="margin-bottom: 12px;">
If you're experiencing issues with audio capture or other features, check the application logs for diagnostic information.
</div>
<button class="open-logs-btn" @click=${this.openLogsFolder}>
📁 Open Logs Folder
</button>
</div> </div>
</div> </div>
`; `;
} }
async openLogsFolder() {
try {
const { ipcRenderer } = require('electron');
const result = await ipcRenderer.invoke('open-logs-folder');
if (!result.success) {
console.error('Failed to open logs folder:', result.error);
}
} catch (err) {
console.error('Error opening logs folder:', err);
}
}
} }
customElements.define('help-view', HelpView); customElements.define('help-view', HelpView);

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -3,13 +3,7 @@ import { html, css, LitElement } from '../../assets/lit-core-2.7.4.min.js';
export class OnboardingView extends LitElement { export class OnboardingView extends LitElement {
static styles = css` static styles = css`
* { * {
font-family: font-family: var(--font);
'Inter',
-apple-system,
BlinkMacSystemFont,
'Segoe UI',
Roboto,
sans-serif;
cursor: default; cursor: default;
user-select: none; user-select: none;
margin: 0; margin: 0;
@ -27,44 +21,20 @@ export class OnboardingView extends LitElement {
overflow: hidden; overflow: hidden;
} }
.onboarding-container { .onboarding {
position: relative;
width: 100%; width: 100%;
height: 100%; height: 100%;
background: #0a0a0a; position: relative;
overflow: hidden;
}
.close-button {
position: absolute;
top: 12px;
right: 12px;
z-index: 10;
background: rgba(255, 255, 255, 0.08);
border: 1px solid rgba(255, 255, 255, 0.1);
border-radius: 6px;
width: 32px;
height: 32px;
display: flex; display: flex;
align-items: center; align-items: center;
justify-content: center; justify-content: center;
cursor: pointer; border-radius: 12px;
transition: all 0.2s ease; border: 1px solid rgba(0, 0, 0, 0.08);
color: rgba(255, 255, 255, 0.6); overflow: hidden;
background: #f0f0f0;
} }
.close-button:hover { canvas.aurora {
background: rgba(255, 255, 255, 0.12);
border-color: rgba(255, 255, 255, 0.2);
color: rgba(255, 255, 255, 0.9);
}
.close-button svg {
width: 16px;
height: 16px;
}
.gradient-canvas {
position: absolute; position: absolute;
top: 0; top: 0;
left: 0; left: 0;
@ -73,166 +43,104 @@ export class OnboardingView extends LitElement {
z-index: 0; z-index: 0;
} }
.content-wrapper { canvas.dither {
position: absolute; position: absolute;
top: 0; top: 0;
left: 0; left: 0;
right: 0; width: 100%;
bottom: 60px; height: 100%;
z-index: 1; z-index: 1;
display: flex; opacity: 0.12;
flex-direction: column; mix-blend-mode: overlay;
justify-content: center; pointer-events: none;
padding: 32px 48px; image-rendering: pixelated;
max-width: 500px;
color: #e5e5e5;
overflow: hidden;
} }
.slide-icon { .slide {
width: 48px; position: relative;
height: 48px; z-index: 2;
margin-bottom: 16px; display: flex;
opacity: 0.9; flex-direction: column;
display: block; align-items: center;
text-align: center;
max-width: 400px;
padding: var(--space-xl);
gap: var(--space-md);
} }
.slide-title { .slide-title {
font-size: 28px; font-size: 28px;
font-weight: 600; font-weight: 600;
margin-bottom: 12px; color: #111111;
color: #ffffff; line-height: 1.2;
line-height: 1.3;
} }
.slide-content { .slide-text {
font-size: 16px; font-size: 13px;
line-height: 1.5; line-height: 1.5;
margin-bottom: 24px; color: #666666;
color: #b8b8b8;
font-weight: 400;
} }
.context-textarea { .context-input {
width: 100%; width: 100%;
height: 100px; min-height: 120px;
padding: 16px; padding: 12px;
border: 1px solid rgba(255, 255, 255, 0.1); border: 1px solid rgba(0, 0, 0, 0.12);
border-radius: 8px; border-radius: 8px;
background: rgba(255, 255, 255, 0.05); background: rgba(255, 255, 255, 0.7);
color: #e5e5e5; backdrop-filter: blur(8px);
font-size: 14px; color: #111111;
font-family: inherit; font-size: 13px;
font-family: var(--font);
line-height: 1.5;
resize: vertical; resize: vertical;
transition: all 0.2s ease; text-align: left;
margin-bottom: 24px;
} }
.context-textarea::placeholder { .context-input::placeholder {
color: rgba(255, 255, 255, 0.4); color: #999999;
font-size: 14px;
} }
.context-textarea:focus { .context-input:focus {
outline: none; outline: none;
border-color: rgba(255, 255, 255, 0.2); border-color: rgba(0, 0, 0, 0.3);
background: rgba(255, 255, 255, 0.08);
} }
.feature-list { .actions {
max-width: 100%;
}
.feature-item {
display: flex; display: flex;
flex-direction: column;
align-items: center; align-items: center;
margin-bottom: 12px; gap: 8px;
font-size: 15px; margin-top: 8px;
color: #b8b8b8;
} }
.feature-icon { .btn-primary {
font-size: 16px; background: #111111;
margin-right: 12px; border: none;
opacity: 0.8; color: #ffffff;
} padding: 10px 32px;
border-radius: 8px;
.navigation {
position: absolute;
bottom: 0;
left: 0;
right: 0;
z-index: 2;
display: flex;
align-items: center;
justify-content: space-between;
padding: 16px 24px;
background: rgba(0, 0, 0, 0.3);
backdrop-filter: blur(10px);
border-top: 1px solid rgba(255, 255, 255, 0.05);
height: 60px;
box-sizing: border-box;
}
.nav-button {
background: rgba(255, 255, 255, 0.08);
border: 1px solid rgba(255, 255, 255, 0.1);
color: #e5e5e5;
padding: 8px 16px;
border-radius: 6px;
font-size: 13px; font-size: 13px;
font-weight: 500; font-weight: 500;
cursor: pointer; cursor: pointer;
transition: all 0.2s ease; transition: opacity 0.15s;
display: flex;
align-items: center;
justify-content: center;
min-width: 36px;
min-height: 36px;
} }
.nav-button:hover { .btn-primary:hover {
background: rgba(255, 255, 255, 0.12); opacity: 0.85;
border-color: rgba(255, 255, 255, 0.2);
} }
.nav-button:active { .btn-back {
transform: scale(0.98); background: none;
} border: none;
color: #888888;
.nav-button:disabled { font-size: 11px;
opacity: 0.4;
cursor: not-allowed;
}
.nav-button:disabled:hover {
background: rgba(255, 255, 255, 0.08);
border-color: rgba(255, 255, 255, 0.1);
transform: none;
}
.progress-dots {
display: flex;
gap: 12px;
align-items: center;
}
.dot {
width: 8px;
height: 8px;
border-radius: 50%;
background: rgba(255, 255, 255, 0.2);
transition: all 0.2s ease;
cursor: pointer; cursor: pointer;
padding: 4px 8px;
} }
.dot:hover { .btn-back:hover {
background: rgba(255, 255, 255, 0.4); color: #555555;
}
.dot.active {
background: rgba(255, 255, 255, 0.8);
transform: scale(1.2);
} }
`; `;
@ -240,7 +148,6 @@ export class OnboardingView extends LitElement {
currentSlide: { type: Number }, currentSlide: { type: Number },
contextText: { type: String }, contextText: { type: String },
onComplete: { type: Function }, onComplete: { type: Function },
onClose: { type: Function },
}; };
constructor() { constructor() {
@ -248,220 +155,151 @@ export class OnboardingView extends LitElement {
this.currentSlide = 0; this.currentSlide = 0;
this.contextText = ''; this.contextText = '';
this.onComplete = () => {}; this.onComplete = () => {};
this.onClose = () => {}; this._animId = null;
this.canvas = null; this._time = 0;
this.ctx = null;
this.animationId = null;
// Transition properties
this.isTransitioning = false;
this.transitionStartTime = 0;
this.transitionDuration = 800; // 800ms fade duration
this.previousColorScheme = null;
// Subtle dark color schemes for each slide
this.colorSchemes = [
// Slide 1 - Welcome (Very dark purple/gray)
[
[25, 25, 35], // Dark gray-purple
[20, 20, 30], // Darker gray
[30, 25, 40], // Slightly purple
[15, 15, 25], // Very dark
[35, 30, 45], // Muted purple
[10, 10, 20], // Almost black
],
// Slide 2 - Privacy (Dark blue-gray)
[
[20, 25, 35], // Dark blue-gray
[15, 20, 30], // Darker blue-gray
[25, 30, 40], // Slightly blue
[10, 15, 25], // Very dark blue
[30, 35, 45], // Muted blue
[5, 10, 20], // Almost black
],
// Slide 3 - Context (Dark neutral)
[
[25, 25, 25], // Neutral dark
[20, 20, 20], // Darker neutral
[30, 30, 30], // Light dark
[15, 15, 15], // Very dark
[35, 35, 35], // Lighter dark
[10, 10, 10], // Almost black
],
// Slide 4 - Features (Dark green-gray)
[
[20, 30, 25], // Dark green-gray
[15, 25, 20], // Darker green-gray
[25, 35, 30], // Slightly green
[10, 20, 15], // Very dark green
[30, 40, 35], // Muted green
[5, 15, 10], // Almost black
],
// Slide 5 - Complete (Dark warm gray)
[
[30, 25, 20], // Dark warm gray
[25, 20, 15], // Darker warm
[35, 30, 25], // Slightly warm
[20, 15, 10], // Very dark warm
[40, 35, 30], // Muted warm
[15, 10, 5], // Almost black
],
];
} }
firstUpdated() { firstUpdated() {
this.canvas = this.shadowRoot.querySelector('.gradient-canvas'); this._startAurora();
this.ctx = this.canvas.getContext('2d'); this._drawDither();
this.resizeCanvas();
this.startGradientAnimation();
window.addEventListener('resize', () => this.resizeCanvas());
} }
disconnectedCallback() { disconnectedCallback() {
super.disconnectedCallback(); super.disconnectedCallback();
if (this.animationId) { if (this._animId) cancelAnimationFrame(this._animId);
cancelAnimationFrame(this.animationId); }
_drawDither() {
const canvas = this.shadowRoot.querySelector('canvas.dither');
if (!canvas) return;
const blockSize = 5;
const cols = Math.ceil(canvas.offsetWidth / blockSize);
const rows = Math.ceil(canvas.offsetHeight / blockSize);
canvas.width = cols;
canvas.height = rows;
const ctx = canvas.getContext('2d');
const img = ctx.createImageData(cols, rows);
for (let i = 0; i < img.data.length; i += 4) {
const v = Math.random() > 0.5 ? 255 : 0;
img.data[i] = v;
img.data[i + 1] = v;
img.data[i + 2] = v;
img.data[i + 3] = 255;
} }
window.removeEventListener('resize', () => this.resizeCanvas()); ctx.putImageData(img, 0, 0);
} }
resizeCanvas() { _startAurora() {
if (!this.canvas) return; const canvas = this.shadowRoot.querySelector('canvas.aurora');
if (!canvas) return;
const ctx = canvas.getContext('2d');
const rect = this.getBoundingClientRect(); const scale = 0.35;
this.canvas.width = rect.width; const resize = () => {
this.canvas.height = rect.height; canvas.width = Math.floor(canvas.offsetWidth * scale);
} canvas.height = Math.floor(canvas.offsetHeight * scale);
};
resize();
startGradientAnimation() { const blobs = [
if (!this.ctx) return; { parts: [
{ ox: 0, oy: 0, r: 1.0 },
{ ox: 0.22, oy: 0.1, r: 0.85 },
{ ox: 0.11, oy: 0.05, r: 0.5 },
], color: [180, 200, 230], x: 0.15, y: 0.2, vx: 0.35, vy: 0.25, phase: 0 },
const animate = timestamp => { { parts: [
this.drawGradient(timestamp); { ox: 0, oy: 0, r: 0.95 },
this.animationId = requestAnimationFrame(animate); { ox: 0.18, oy: -0.08, r: 0.75 },
{ ox: 0.09, oy: -0.04, r: 0.4 },
], color: [190, 180, 220], x: 0.75, y: 0.2, vx: -0.3, vy: 0.35, phase: 1.2 },
{ parts: [
{ ox: 0, oy: 0, r: 0.9 },
{ ox: 0.24, oy: 0.12, r: 0.9 },
{ ox: 0.12, oy: 0.06, r: 0.35 },
], color: [210, 195, 215], x: 0.5, y: 0.65, vx: 0.25, vy: -0.3, phase: 2.4 },
{ parts: [
{ ox: 0, oy: 0, r: 0.8 },
{ ox: -0.15, oy: 0.18, r: 0.7 },
{ ox: -0.07, oy: 0.09, r: 0.45 },
], color: [175, 210, 210], x: 0.1, y: 0.75, vx: 0.4, vy: 0.2, phase: 3.6 },
{ parts: [
{ ox: 0, oy: 0, r: 0.75 },
{ ox: 0.12, oy: -0.15, r: 0.65 },
{ ox: 0.06, oy: -0.07, r: 0.35 },
], color: [220, 210, 195], x: 0.85, y: 0.55, vx: -0.28, vy: -0.32, phase: 4.8 },
{ parts: [
{ ox: 0, oy: 0, r: 0.95 },
{ ox: -0.2, oy: -0.12, r: 0.75 },
{ ox: -0.1, oy: -0.06, r: 0.4 },
], color: [170, 190, 225], x: 0.6, y: 0.1, vx: -0.2, vy: 0.38, phase: 6.0 },
{ parts: [
{ ox: 0, oy: 0, r: 0.85 },
{ ox: 0.17, oy: 0.15, r: 0.75 },
{ ox: 0.08, oy: 0.07, r: 0.35 },
], color: [200, 190, 220], x: 0.35, y: 0.4, vx: 0.32, vy: -0.22, phase: 7.2 },
{ parts: [
{ ox: 0, oy: 0, r: 0.75 },
{ ox: -0.13, oy: 0.18, r: 0.65 },
{ ox: -0.06, oy: 0.1, r: 0.4 },
], color: [215, 205, 200], x: 0.9, y: 0.85, vx: -0.35, vy: -0.25, phase: 8.4 },
{ parts: [
{ ox: 0, oy: 0, r: 0.7 },
{ ox: 0.16, oy: -0.1, r: 0.6 },
{ ox: 0.08, oy: -0.05, r: 0.35 },
], color: [185, 210, 205], x: 0.45, y: 0.9, vx: 0.22, vy: -0.4, phase: 9.6 },
];
const baseRadius = 0.32;
const draw = () => {
this._time += 0.012;
const w = canvas.width;
const h = canvas.height;
const dim = Math.min(w, h);
ctx.fillStyle = '#f0f0f0';
ctx.fillRect(0, 0, w, h);
for (const blob of blobs) {
const t = this._time;
const cx = (blob.x + Math.sin(t * blob.vx + blob.phase) * 0.22) * w;
const cy = (blob.y + Math.cos(t * blob.vy + blob.phase * 0.7) * 0.22) * h;
for (const part of blob.parts) {
const wobble = Math.sin(t * 2.5 + part.ox * 25 + blob.phase) * 0.02;
const px = cx + (part.ox + wobble) * dim;
const py = cy + (part.oy + wobble * 0.7) * dim;
const pr = part.r * baseRadius * dim;
const grad = ctx.createRadialGradient(px, py, 0, px, py, pr);
grad.addColorStop(0, `rgba(${blob.color[0]}, ${blob.color[1]}, ${blob.color[2]}, 0.55)`);
grad.addColorStop(0.4, `rgba(${blob.color[0]}, ${blob.color[1]}, ${blob.color[2]}, 0.3)`);
grad.addColorStop(0.7, `rgba(${blob.color[0]}, ${blob.color[1]}, ${blob.color[2]}, 0.1)`);
grad.addColorStop(1, `rgba(${blob.color[0]}, ${blob.color[1]}, ${blob.color[2]}, 0)`);
ctx.fillStyle = grad;
ctx.fillRect(0, 0, w, h);
}
}
this._animId = requestAnimationFrame(draw);
}; };
animate(0); draw();
}
drawGradient(timestamp) {
if (!this.ctx || !this.canvas) return;
const { width, height } = this.canvas;
let colors = this.colorSchemes[this.currentSlide];
// Handle color scheme transitions
if (this.isTransitioning && this.previousColorScheme) {
const elapsed = timestamp - this.transitionStartTime;
const progress = Math.min(elapsed / this.transitionDuration, 1);
// Use easing function for smoother transition
const easedProgress = this.easeInOutCubic(progress);
colors = this.interpolateColorSchemes(this.previousColorScheme, this.colorSchemes[this.currentSlide], easedProgress);
// End transition when complete
if (progress >= 1) {
this.isTransitioning = false;
this.previousColorScheme = null;
}
}
const time = timestamp * 0.0005; // Much slower animation
// Create moving gradient with subtle flow
const flowX = Math.sin(time * 0.7) * width * 0.3;
const flowY = Math.cos(time * 0.5) * height * 0.2;
const gradient = this.ctx.createLinearGradient(flowX, flowY, width + flowX * 0.5, height + flowY * 0.5);
// Very subtle color variations with movement
colors.forEach((color, index) => {
const offset = index / (colors.length - 1);
const wave = Math.sin(time + index * 0.3) * 0.05; // Very subtle wave
const r = Math.max(0, Math.min(255, color[0] + wave * 5));
const g = Math.max(0, Math.min(255, color[1] + wave * 5));
const b = Math.max(0, Math.min(255, color[2] + wave * 5));
gradient.addColorStop(offset, `rgb(${r}, ${g}, ${b})`);
});
// Fill with moving gradient
this.ctx.fillStyle = gradient;
this.ctx.fillRect(0, 0, width, height);
// Add a second layer with radial gradient for more depth
const centerX = width * 0.5 + Math.sin(time * 0.3) * width * 0.15;
const centerY = height * 0.5 + Math.cos(time * 0.4) * height * 0.1;
const radius = Math.max(width, height) * 0.8;
const radialGradient = this.ctx.createRadialGradient(centerX, centerY, 0, centerX, centerY, radius);
// Very subtle radial overlay
radialGradient.addColorStop(0, `rgba(${colors[0][0] + 10}, ${colors[0][1] + 10}, ${colors[0][2] + 10}, 0.1)`);
radialGradient.addColorStop(0.5, `rgba(${colors[2][0]}, ${colors[2][1]}, ${colors[2][2]}, 0.05)`);
radialGradient.addColorStop(
1,
`rgba(${colors[colors.length - 1][0]}, ${colors[colors.length - 1][1]}, ${colors[colors.length - 1][2]}, 0.03)`
);
this.ctx.globalCompositeOperation = 'overlay';
this.ctx.fillStyle = radialGradient;
this.ctx.fillRect(0, 0, width, height);
this.ctx.globalCompositeOperation = 'source-over';
}
nextSlide() {
if (this.currentSlide < 4) {
this.startColorTransition(this.currentSlide + 1);
} else {
this.completeOnboarding();
}
}
prevSlide() {
if (this.currentSlide > 0) {
this.startColorTransition(this.currentSlide - 1);
}
}
startColorTransition(newSlide) {
this.previousColorScheme = [...this.colorSchemes[this.currentSlide]];
this.currentSlide = newSlide;
this.isTransitioning = true;
this.transitionStartTime = performance.now();
}
// Interpolate between two color schemes
interpolateColorSchemes(scheme1, scheme2, progress) {
return scheme1.map((color1, index) => {
const color2 = scheme2[index];
return [
color1[0] + (color2[0] - color1[0]) * progress,
color1[1] + (color2[1] - color1[1]) * progress,
color1[2] + (color2[2] - color1[2]) * progress,
];
});
}
// Easing function for smooth transitions
easeInOutCubic(t) {
return t < 0.5 ? 4 * t * t * t : 1 - Math.pow(-2 * t + 2, 3) / 2;
} }
handleContextInput(e) { handleContextInput(e) {
this.contextText = e.target.value; this.contextText = e.target.value;
} }
async handleClose() {
if (window.require) {
const { ipcRenderer } = window.require('electron');
await ipcRenderer.invoke('quit-application');
}
}
async completeOnboarding() { async completeOnboarding() {
if (this.contextText.trim()) { if (this.contextText.trim()) {
await cheatingDaddy.storage.updatePreference('customPrompt', this.contextText.trim()); await cheatingDaddy.storage.updatePreference('customPrompt', this.contextText.trim());
@ -470,120 +308,43 @@ export class OnboardingView extends LitElement {
this.onComplete(); this.onComplete();
} }
getSlideContent() { renderSlide() {
const slides = [ if (this.currentSlide === 0) {
{ return html`
icon: 'assets/onboarding/welcome.svg', <div class="slide">
title: 'Welcome to Cheating Daddy', <div class="slide-title">Mastermind</div>
content: <div class="slide-text">Real-time AI that listens, watches, and helps during interviews, meetings, and exams.</div>
'Your AI assistant that listens and watches, then provides intelligent suggestions automatically during interviews and meetings.', <div class="actions">
}, <button class="btn-primary" @click=${() => { this.currentSlide = 1; }}>Continue</button>
{ </div>
icon: 'assets/onboarding/security.svg', </div>
title: 'Completely Private', `;
content: 'Invisible to screen sharing apps and recording software. Your secret advantage stays completely hidden from others.', }
},
{
icon: 'assets/onboarding/context.svg',
title: 'Add Your Context',
content: 'Share relevant information to help the AI provide better, more personalized assistance.',
showTextarea: true,
},
{
icon: 'assets/onboarding/customize.svg',
title: 'Additional Features',
content: '',
showFeatures: true,
},
{
icon: 'assets/onboarding/ready.svg',
title: 'Ready to Go',
content: 'Add your Gemini API key in settings and start getting AI-powered assistance in real-time.',
},
];
return slides[this.currentSlide]; return html`
<div class="slide">
<div class="slide-title">Add context</div>
<div class="slide-text">Paste your resume or any info the AI should know. You can skip this and add it later.</div>
<textarea
class="context-input"
placeholder="Resume, job description, notes..."
.value=${this.contextText}
@input=${this.handleContextInput}
></textarea>
<div class="actions">
<button class="btn-primary" @click=${this.completeOnboarding}>Get Started</button>
<button class="btn-back" @click=${() => { this.currentSlide = 0; }}>Back</button>
</div>
</div>
`;
} }
render() { render() {
const slide = this.getSlideContent();
return html` return html`
<div class="onboarding-container"> <div class="onboarding">
<button class="close-button" @click=${this.handleClose} title="Close"> <canvas class="aurora"></canvas>
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 20 20" fill="currentColor"> <canvas class="dither"></canvas>
<path d="M6.28 5.22a.75.75 0 0 0-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 1 0 1.06 1.06L10 11.06l3.72 3.72a.75.75 0 1 0 1.06-1.06L11.06 10l3.72-3.72a.75.75 0 0 0-1.06-1.06L10 8.94 6.28 5.22Z" /> ${this.renderSlide()}
</svg>
</button>
<canvas class="gradient-canvas"></canvas>
<div class="content-wrapper">
<img class="slide-icon" src="${slide.icon}" alt="${slide.title} icon" />
<div class="slide-title">${slide.title}</div>
<div class="slide-content">${slide.content}</div>
${slide.showTextarea
? html`
<textarea
class="context-textarea"
placeholder="Paste your resume, job description, or any relevant context here..."
.value=${this.contextText}
@input=${this.handleContextInput}
></textarea>
`
: ''}
${slide.showFeatures
? html`
<div class="feature-list">
<div class="feature-item">
<span class="feature-icon">-</span>
Customize AI behavior and responses
</div>
<div class="feature-item">
<span class="feature-icon">-</span>
Review conversation history
</div>
<div class="feature-item">
<span class="feature-icon">-</span>
Adjust capture settings and intervals
</div>
</div>
`
: ''}
</div>
<div class="navigation">
<button class="nav-button" @click=${this.prevSlide} ?disabled=${this.currentSlide === 0}>
<svg width="16px" height="16px" stroke-width="2" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M15 6L9 12L15 18" stroke="currentColor" stroke-linecap="round" stroke-linejoin="round"></path>
</svg>
</button>
<div class="progress-dots">
${[0, 1, 2, 3, 4].map(
index => html`
<div
class="dot ${index === this.currentSlide ? 'active' : ''}"
@click=${() => {
if (index !== this.currentSlide) {
this.startColorTransition(index);
}
}}
></div>
`
)}
</div>
<button class="nav-button" @click=${this.nextSlide}>
${this.currentSlide === 4
? 'Get Started'
: html`
<svg width="16px" height="16px" stroke-width="2" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M9 6L15 12L9 18" stroke="currentColor" stroke-linecap="round" stroke-linejoin="round"></path>
</svg>
`}
</button>
</div>
</div> </div>
`; `;
} }

View File

@ -0,0 +1,172 @@
import { css } from '../../assets/lit-core-2.7.4.min.js';
/**
 * Shared Lit styles for "unified" pages (settings-style views).
 *
 * Provides:
 *  - page scaffolding: `.unified-page` (scrollable container) and
 *    `.unified-wrap` (centered column, max-width 1160px);
 *  - headings: `.page-title` / `.page-subtitle`, and card headings
 *    `.surface-title` / `.surface-subtitle`;
 *  - `.surface` bordered card container;
 *  - form layout: `.form-grid`, `.form-row`, `.form-group`
 *    (horizontal by default, `.vertical` modifier stacks), `.form-label`,
 *    `.form-help`;
 *  - inputs: `.control` (200px wide by default), with `select.control`
 *    (custom inline-SVG chevron) and `textarea.control` variants;
 *  - small decorations: `.chip`, `.pill`, `.muted`, `.danger`.
 *
 * All colors/spacing/typography come from the app's CSS custom properties
 * (--bg-*, --text-*, --border*, --space-*, --font-*, --radius-*, --accent,
 * --transition), which are expected to be defined at the document level.
 */
export const unifiedPageStyles = css`
* {
box-sizing: border-box;
font-family: var(--font);
cursor: default;
user-select: none;
}
:host {
display: block;
height: 100%;
}
.unified-page {
height: 100%;
overflow-y: auto;
padding: var(--space-lg);
background: var(--bg-app);
}
.unified-wrap {
width: 100%;
max-width: 1160px;
margin: 0 auto;
display: flex;
flex-direction: column;
gap: var(--space-md);
min-height: 100%;
}
.page-title {
font-size: var(--font-size-xl);
font-weight: var(--font-weight-semibold);
color: var(--text-primary);
margin-bottom: 4px;
}
.page-subtitle {
color: var(--text-muted);
font-size: var(--font-size-sm);
}
.surface {
border: 1px solid var(--border);
border-radius: var(--radius-md);
background: var(--bg-surface);
padding: var(--space-md);
}
.surface-title {
color: var(--text-primary);
font-size: var(--font-size-md);
font-weight: var(--font-weight-semibold);
margin-bottom: 4px;
}
.surface-subtitle {
color: var(--text-muted);
font-size: var(--font-size-xs);
margin-bottom: var(--space-md);
}
.form-grid {
display: flex;
flex-direction: column;
gap: var(--space-sm);
}
.form-row {
display: flex;
flex-direction: column;
gap: var(--space-sm);
}
.form-group {
display: flex;
align-items: center;
justify-content: space-between;
gap: var(--space-md);
}
.form-group.vertical {
flex-direction: column;
align-items: stretch;
}
.form-label {
color: var(--text-secondary);
font-size: var(--font-size-sm);
white-space: nowrap;
flex-shrink: 0;
}
.form-help {
color: var(--text-muted);
font-size: var(--font-size-xs);
line-height: 1.4;
}
.control {
width: 200px;
background: var(--bg-elevated);
color: var(--text-primary);
border: 1px solid var(--border);
border-radius: var(--radius-sm);
padding: 8px 12px;
font-size: var(--font-size-sm);
transition: border-color var(--transition), box-shadow var(--transition);
}
.control:hover:not(:focus) {
border-color: var(--border-strong);
}
.control:focus {
outline: none;
border-color: var(--accent);
box-shadow: 0 0 0 1px var(--accent);
}
select.control {
appearance: none;
background-image: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' fill='none' viewBox='0 0 20 20'%3e%3cpath stroke='%236b6b6b' stroke-linecap='round' stroke-linejoin='round' stroke-width='1.5' d='M6 8l4 4 4-4'/%3e%3c/svg%3e");
background-position: right 8px center;
background-repeat: no-repeat;
background-size: 12px;
padding-right: 28px;
cursor: pointer;
}
textarea.control {
width: 100%;
min-height: 100px;
resize: vertical;
line-height: 1.45;
}
.chip {
display: inline-flex;
align-items: center;
border-radius: var(--radius-sm);
background: var(--bg-elevated);
color: var(--text-secondary);
padding: 2px 8px;
font-size: var(--font-size-xs);
font-family: var(--font-mono);
}
.pill {
border: 1px solid var(--border);
border-radius: 999px;
padding: 2px 8px;
font-size: var(--font-size-xs);
color: var(--text-muted);
}
.muted {
color: var(--text-muted);
}
.danger {
color: var(--danger);
}
@media (max-width: 640px) {
.unified-page {
padding: var(--space-md);
}
}
`;

View File

@ -5,75 +5,112 @@
<title>Screen and Audio Capture</title> <title>Screen and Audio Capture</title>
<style> <style>
:root { :root {
/* Backgrounds - with default 0.8 transparency */ /* Backgrounds */
--background-transparent: transparent; --bg-app: #0A0A0A;
--bg-primary: rgba(30, 30, 30, 0.8); --bg-surface: #111111;
--bg-secondary: rgba(37, 37, 38, 0.8); --bg-elevated: #191919;
--bg-tertiary: rgba(45, 45, 45, 0.8); --bg-hover: #1F1F1F;
--bg-hover: rgba(50, 50, 50, 0.8);
/* Text */ /* Text */
--text-color: #e5e5e5; --text-primary: #F5F5F5;
--text-secondary: #a0a0a0; --text-secondary: #999999;
--text-muted: #6b6b6b; --text-muted: #555555;
--description-color: #a0a0a0;
--placeholder-color: #6b6b6b;
/* Borders */ /* Borders & Lines */
--border-color: #3c3c3c; --border: #222222;
--border-subtle: #3c3c3c; --border-strong: #333333;
--border-default: #4a4a4a;
/* Component backgrounds - with default 0.8 transparency */ /* Accent */
--header-background: rgba(30, 30, 30, 0.8); --accent: #3B82F6;
--header-actions-color: #a0a0a0; --accent-hover: #2563EB;
--main-content-background: rgba(30, 30, 30, 0.8);
/* Status */
--success: #22C55E;
--warning: #D4A017;
--danger: #EF4444;
/* Typography */
--font: 'Inter', -apple-system, BlinkMacSystemFont, system-ui, sans-serif;
--font-mono: 'SF Mono', 'Menlo', 'Monaco', 'Consolas', monospace;
--font-size-xs: 11px;
--font-size-sm: 13px;
--font-size-base: 14px;
--font-size-lg: 16px;
--font-size-xl: 20px;
--font-size-2xl: 28px;
--font-weight-normal: 400;
--font-weight-medium: 500;
--font-weight-semibold: 600;
--line-height: 1.6;
/* Spacing */
--space-xs: 4px;
--space-sm: 8px;
--space-md: 16px;
--space-lg: 24px;
--space-xl: 40px;
--space-2xl: 64px;
/* Radius */
--radius-sm: 4px;
--radius-md: 8px;
--radius-lg: 12px;
/* Transitions */
--transition: 150ms ease;
/* Sidebar */
--sidebar-width: 220px;
--sidebar-width-collapsed: 60px;
/* Legacy compatibility — mapped to new tokens */
--background-transparent: transparent;
--bg-primary: var(--bg-app);
--bg-secondary: var(--bg-surface);
--bg-tertiary: var(--bg-elevated);
--text-color: var(--text-primary);
--description-color: var(--text-secondary);
--placeholder-color: var(--text-muted);
--border-color: var(--border);
--border-subtle: var(--border);
--border-default: var(--border-strong);
--header-background: var(--bg-surface);
--header-actions-color: var(--text-secondary);
--main-content-background: var(--bg-app);
--button-background: transparent; --button-background: transparent;
--button-border: #3c3c3c; --button-border: var(--border-strong);
--icon-button-color: #a0a0a0; --icon-button-color: var(--text-secondary);
--hover-background: rgba(50, 50, 50, 0.8); --hover-background: var(--bg-hover);
--input-background: rgba(45, 45, 45, 0.8); --input-background: var(--bg-elevated);
--input-focus-background: rgba(45, 45, 45, 0.8); --input-focus-background: var(--bg-elevated);
--focus-border-color: var(--accent);
/* Focus states - neutral */
--focus-border-color: #4a4a4a;
--focus-box-shadow: transparent; --focus-box-shadow: transparent;
--scrollbar-track: var(--bg-app);
--scrollbar-thumb: var(--border-strong);
--scrollbar-thumb-hover: #444444;
--scrollbar-background: var(--bg-app);
--start-button-background: var(--accent);
--start-button-color: #ffffff;
--start-button-border: var(--accent);
--start-button-hover-background: var(--accent-hover);
--start-button-hover-border: var(--accent-hover);
--text-input-button-background: var(--accent);
--text-input-button-hover: var(--accent-hover);
--link-color: var(--accent);
--key-background: var(--bg-elevated);
--success-color: var(--success);
--warning-color: var(--warning);
--error-color: var(--danger);
--danger-color: var(--danger);
--preview-video-background: var(--bg-surface);
--preview-video-border: var(--border);
--option-label-color: var(--text-primary);
--screen-option-background: var(--bg-surface);
--screen-option-hover-background: var(--bg-elevated);
--screen-option-selected-background: var(--bg-hover);
--screen-option-text: var(--text-secondary);
/* Scrollbar */ /* Layout-specific */
--scrollbar-track: #1e1e1e;
--scrollbar-thumb: #3c3c3c;
--scrollbar-thumb-hover: #4a4a4a;
--scrollbar-background: #1e1e1e;
/* Legacy/misc */
--preview-video-background: #1e1e1e;
--preview-video-border: #3c3c3c;
--option-label-color: #e5e5e5;
--screen-option-background: #252526;
--screen-option-hover-background: #2d2d2d;
--screen-option-selected-background: #323232;
--screen-option-text: #a0a0a0;
/* Buttons */
--start-button-background: #ffffff;
--start-button-color: #1e1e1e;
--start-button-border: #ffffff;
--start-button-hover-background: #e0e0e0;
--start-button-hover-border: #e0e0e0;
--text-input-button-background: #ffffff;
--text-input-button-hover: #e0e0e0;
/* Links - neutral */
--link-color: #e5e5e5;
--key-background: #2d2d2d;
/* Status colors */
--success-color: #4ec9b0;
--warning-color: #dcdcaa;
--error-color: #f14c4c;
--danger-color: #f14c4c;
/* Layout-specific variables */
--header-padding: 8px 16px; --header-padding: 8px 16px;
--header-font-size: 14px; --header-font-size: 14px;
--header-gap: 8px; --header-gap: 8px;
@ -81,48 +118,65 @@
--header-icon-padding: 6px; --header-icon-padding: 6px;
--header-font-size-small: 12px; --header-font-size-small: 12px;
--main-content-padding: 16px; --main-content-padding: 16px;
--main-content-margin-top: 1px; --main-content-margin-top: 0;
--icon-size: 18px; --icon-size: 18px;
--border-radius: 3px; --border-radius: var(--radius-sm);
--content-border-radius: 0; --content-border-radius: 0;
} }
/* Compact layout styles */ html {
:root.compact-layout { margin: 0;
--header-padding: 6px 12px; padding: 0;
--header-font-size: 12px; height: 100%;
--header-gap: 6px; overflow: hidden;
--header-button-padding: 4px 8px; border-radius: 12px;
--header-icon-padding: 4px; background: transparent;
--header-font-size-small: 10px;
--main-content-padding: 12px;
--main-content-margin-top: 1px;
--icon-size: 16px;
--border-radius: 3px;
--content-border-radius: 0;
} }
html,
body { body {
margin: 0; margin: 0;
padding: 0; padding: 0;
height: 100%; height: 100%;
overflow: hidden; overflow: hidden;
background: transparent; background: var(--bg-app);
} color: var(--text-primary);
line-height: var(--line-height);
body { border-radius: 12px;
font-family: border: 1px solid var(--border);
'Inter', font-family: var(--font);
-apple-system, font-size: var(--font-size-base);
BlinkMacSystemFont, font-weight: var(--font-weight-normal);
sans-serif; -webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
} }
* { * {
box-sizing: border-box; box-sizing: border-box;
} }
:focus-visible {
outline: 2px solid var(--accent);
outline-offset: 2px;
}
::-webkit-scrollbar {
width: 6px;
height: 6px;
}
::-webkit-scrollbar-track {
background: transparent;
}
::-webkit-scrollbar-thumb {
background: var(--border-strong);
border-radius: 3px;
}
::-webkit-scrollbar-thumb:hover {
background: #444444;
}
cheating-daddy-app { cheating-daddy-app {
display: block; display: block;
width: 100%; width: 100%;

View File

@ -1,350 +1,336 @@
if (require('electron-squirrel-startup')) { if (require("electron-squirrel-startup")) {
process.exit(0); process.exit(0);
} }
const { app, BrowserWindow, shell, ipcMain } = require('electron'); // ── Global crash handlers to prevent silent process termination ──
const { createWindow, updateGlobalShortcuts } = require('./utils/window'); process.on("uncaughtException", (error) => {
const { setupAIProviderIpcHandlers } = require('./utils/ai-provider-manager'); console.error("[FATAL] Uncaught exception:", error);
const { stopMacOSAudioCapture } = require('./utils/gemini'); try {
const { initLogger, closeLogger, getLogPath } = require('./utils/logger'); const { sendToRenderer } = require("./utils/gemini");
const storage = require('./storage'); sendToRenderer(
"update-status",
"Fatal error: " + (error?.message || "unknown"),
);
} catch (_) {
// sendToRenderer may not be available yet
}
});
process.on("unhandledRejection", (reason) => {
console.error("[FATAL] Unhandled promise rejection:", reason);
try {
const { sendToRenderer } = require("./utils/gemini");
sendToRenderer(
"update-status",
"Unhandled error: " +
(reason instanceof Error ? reason.message : String(reason)),
);
} catch (_) {
// sendToRenderer may not be available yet
}
});
const { app, BrowserWindow, shell, ipcMain } = require("electron");
const { createWindow, updateGlobalShortcuts } = require("./utils/window");
const {
setupGeminiIpcHandlers,
stopMacOSAudioCapture,
sendToRenderer,
} = require("./utils/gemini");
const storage = require("./storage");
const geminiSessionRef = { current: null }; const geminiSessionRef = { current: null };
let mainWindow = null; let mainWindow = null;
function sendToRenderer(channel, data) {
const windows = BrowserWindow.getAllWindows();
if (windows.length > 0) {
windows[0].webContents.send(channel, data);
}
}
function createMainWindow() { function createMainWindow() {
mainWindow = createWindow(sendToRenderer, geminiSessionRef); mainWindow = createWindow(sendToRenderer, geminiSessionRef);
return mainWindow; return mainWindow;
} }
app.whenReady().then(async () => { app.whenReady().then(async () => {
// Initialize file logger first // Initialize storage (checks version, resets if needed)
const logPath = initLogger(); storage.initializeStorage();
console.log('App starting, log file:', logPath);
// Initialize storage (checks version, resets if needed) // Trigger screen recording permission prompt on macOS if not already granted
storage.initializeStorage(); if (process.platform === "darwin") {
const { desktopCapturer } = require("electron");
desktopCapturer.getSources({ types: ["screen"] }).catch(() => {});
}
createMainWindow();
setupGeminiIpcHandlers(geminiSessionRef);
setupStorageIpcHandlers();
setupGeneralIpcHandlers();
});
app.on("window-all-closed", () => {
stopMacOSAudioCapture();
if (process.platform !== "darwin") {
app.quit();
}
});
app.on("before-quit", () => {
stopMacOSAudioCapture();
});
app.on("activate", () => {
if (BrowserWindow.getAllWindows().length === 0) {
createMainWindow(); createMainWindow();
setupAIProviderIpcHandlers(geminiSessionRef); }
setupStorageIpcHandlers();
setupGeneralIpcHandlers();
// Add handler to get log path from renderer
ipcMain.handle('get-log-path', () => getLogPath());
// Add handler for renderer logs (so they go to the log file)
ipcMain.on('renderer-log', (event, { level, message }) => {
const prefix = '[RENDERER]';
if (level === 'error') console.error(prefix, message);
else if (level === 'warn') console.warn(prefix, message);
else console.log(prefix, message);
});
});
app.on('window-all-closed', () => {
stopMacOSAudioCapture();
closeLogger();
if (process.platform !== 'darwin') {
app.quit();
}
});
app.on('before-quit', () => {
stopMacOSAudioCapture();
closeLogger();
});
app.on('activate', () => {
if (BrowserWindow.getAllWindows().length === 0) {
createMainWindow();
}
}); });
function setupStorageIpcHandlers() { function setupStorageIpcHandlers() {
// ============ CONFIG ============ // ============ CONFIG ============
ipcMain.handle('storage:get-config', async () => { ipcMain.handle("storage:get-config", async () => {
try { try {
return { success: true, data: storage.getConfig() }; return { success: true, data: storage.getConfig() };
} catch (error) { } catch (error) {
console.error('Error getting config:', error); console.error("Error getting config:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
ipcMain.handle('storage:set-config', async (event, config) => { ipcMain.handle("storage:set-config", async (event, config) => {
try { try {
storage.setConfig(config); storage.setConfig(config);
return { success: true }; return { success: true };
} catch (error) { } catch (error) {
console.error('Error setting config:', error); console.error("Error setting config:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
ipcMain.handle('storage:update-config', async (event, key, value) => { ipcMain.handle("storage:update-config", async (event, key, value) => {
try { try {
storage.updateConfig(key, value); storage.updateConfig(key, value);
return { success: true }; return { success: true };
} catch (error) { } catch (error) {
console.error('Error updating config:', error); console.error("Error updating config:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
// ============ CREDENTIALS ============ // ============ CREDENTIALS ============
ipcMain.handle('storage:get-credentials', async () => { ipcMain.handle("storage:get-credentials", async () => {
try { try {
return { success: true, data: storage.getCredentials() }; return { success: true, data: storage.getCredentials() };
} catch (error) { } catch (error) {
console.error('Error getting credentials:', error); console.error("Error getting credentials:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
ipcMain.handle('storage:set-credentials', async (event, credentials) => { ipcMain.handle("storage:set-credentials", async (event, credentials) => {
try { try {
storage.setCredentials(credentials); storage.setCredentials(credentials);
return { success: true }; return { success: true };
} catch (error) { } catch (error) {
console.error('Error setting credentials:', error); console.error("Error setting credentials:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
ipcMain.handle('storage:get-api-key', async () => { ipcMain.handle("storage:get-api-key", async () => {
try { try {
return { success: true, data: storage.getApiKey() }; return { success: true, data: storage.getApiKey() };
} catch (error) { } catch (error) {
console.error('Error getting API key:', error); console.error("Error getting API key:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
ipcMain.handle('storage:set-api-key', async (event, apiKey) => { ipcMain.handle("storage:set-api-key", async (event, apiKey) => {
try { try {
storage.setApiKey(apiKey); storage.setApiKey(apiKey);
return { success: true }; return { success: true };
} catch (error) { } catch (error) {
console.error('Error setting API key:', error); console.error("Error setting API key:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
ipcMain.handle('storage:get-openai-credentials', async () => { ipcMain.handle("storage:get-groq-api-key", async () => {
try { try {
return { success: true, data: storage.getOpenAICredentials() }; return { success: true, data: storage.getGroqApiKey() };
} catch (error) { } catch (error) {
console.error('Error getting OpenAI credentials:', error); console.error("Error getting Groq API key:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
ipcMain.handle('storage:set-openai-credentials', async (event, config) => { ipcMain.handle("storage:set-groq-api-key", async (event, groqApiKey) => {
try { try {
storage.setOpenAICredentials(config); storage.setGroqApiKey(groqApiKey);
return { success: true }; return { success: true };
} catch (error) { } catch (error) {
console.error('Error setting OpenAI credentials:', error); console.error("Error setting Groq API key:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
ipcMain.handle('storage:get-openai-sdk-credentials', async () => { // ============ PREFERENCES ============
try { ipcMain.handle("storage:get-preferences", async () => {
return { success: true, data: storage.getOpenAISDKCredentials() }; try {
} catch (error) { return { success: true, data: storage.getPreferences() };
console.error('Error getting OpenAI SDK credentials:', error); } catch (error) {
return { success: false, error: error.message }; console.error("Error getting preferences:", error);
} return { success: false, error: error.message };
}); }
});
ipcMain.handle('storage:set-openai-sdk-credentials', async (event, config) => { ipcMain.handle("storage:set-preferences", async (event, preferences) => {
try { try {
storage.setOpenAISDKCredentials(config); storage.setPreferences(preferences);
return { success: true }; return { success: true };
} catch (error) { } catch (error) {
console.error('Error setting OpenAI SDK credentials:', error); console.error("Error setting preferences:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
// ============ PREFERENCES ============ ipcMain.handle("storage:update-preference", async (event, key, value) => {
ipcMain.handle('storage:get-preferences', async () => { try {
try { storage.updatePreference(key, value);
return { success: true, data: storage.getPreferences() }; return { success: true };
} catch (error) { } catch (error) {
console.error('Error getting preferences:', error); console.error("Error updating preference:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
ipcMain.handle('storage:set-preferences', async (event, preferences) => { // ============ KEYBINDS ============
try { ipcMain.handle("storage:get-keybinds", async () => {
storage.setPreferences(preferences); try {
return { success: true }; return { success: true, data: storage.getKeybinds() };
} catch (error) { } catch (error) {
console.error('Error setting preferences:', error); console.error("Error getting keybinds:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
ipcMain.handle('storage:update-preference', async (event, key, value) => { ipcMain.handle("storage:set-keybinds", async (event, keybinds) => {
try { try {
storage.updatePreference(key, value); storage.setKeybinds(keybinds);
return { success: true }; return { success: true };
} catch (error) { } catch (error) {
console.error('Error updating preference:', error); console.error("Error setting keybinds:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
// ============ KEYBINDS ============ // ============ HISTORY ============
ipcMain.handle('storage:get-keybinds', async () => { ipcMain.handle("storage:get-all-sessions", async () => {
try { try {
return { success: true, data: storage.getKeybinds() }; return { success: true, data: storage.getAllSessions() };
} catch (error) { } catch (error) {
console.error('Error getting keybinds:', error); console.error("Error getting sessions:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
ipcMain.handle('storage:set-keybinds', async (event, keybinds) => { ipcMain.handle("storage:get-session", async (event, sessionId) => {
try { try {
storage.setKeybinds(keybinds); return { success: true, data: storage.getSession(sessionId) };
return { success: true }; } catch (error) {
} catch (error) { console.error("Error getting session:", error);
console.error('Error setting keybinds:', error); return { success: false, error: error.message };
return { success: false, error: error.message }; }
} });
});
// ============ HISTORY ============ ipcMain.handle("storage:save-session", async (event, sessionId, data) => {
ipcMain.handle('storage:get-all-sessions', async () => { try {
try { storage.saveSession(sessionId, data);
return { success: true, data: storage.getAllSessions() }; return { success: true };
} catch (error) { } catch (error) {
console.error('Error getting sessions:', error); console.error("Error saving session:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
ipcMain.handle('storage:get-session', async (event, sessionId) => { ipcMain.handle("storage:delete-session", async (event, sessionId) => {
try { try {
return { success: true, data: storage.getSession(sessionId) }; storage.deleteSession(sessionId);
} catch (error) { return { success: true };
console.error('Error getting session:', error); } catch (error) {
return { success: false, error: error.message }; console.error("Error deleting session:", error);
} return { success: false, error: error.message };
}); }
});
ipcMain.handle('storage:save-session', async (event, sessionId, data) => { ipcMain.handle("storage:delete-all-sessions", async () => {
try { try {
storage.saveSession(sessionId, data); storage.deleteAllSessions();
return { success: true }; return { success: true };
} catch (error) { } catch (error) {
console.error('Error saving session:', error); console.error("Error deleting all sessions:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
ipcMain.handle('storage:delete-session', async (event, sessionId) => { // ============ LIMITS ============
try { ipcMain.handle("storage:get-today-limits", async () => {
storage.deleteSession(sessionId); try {
return { success: true }; return { success: true, data: storage.getTodayLimits() };
} catch (error) { } catch (error) {
console.error('Error deleting session:', error); console.error("Error getting today limits:", error);
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
ipcMain.handle('storage:delete-all-sessions', async () => { // ============ CLEAR ALL ============
try { ipcMain.handle("storage:clear-all", async () => {
storage.deleteAllSessions(); try {
return { success: true }; storage.clearAllData();
} catch (error) { return { success: true };
console.error('Error deleting all sessions:', error); } catch (error) {
return { success: false, error: error.message }; console.error("Error clearing all data:", error);
} return { success: false, error: error.message };
}); }
});
// ============ LIMITS ============
ipcMain.handle('storage:get-today-limits', async () => {
try {
return { success: true, data: storage.getTodayLimits() };
} catch (error) {
console.error('Error getting today limits:', error);
return { success: false, error: error.message };
}
});
// ============ CLEAR ALL ============
ipcMain.handle('storage:clear-all', async () => {
try {
storage.clearAllData();
return { success: true };
} catch (error) {
console.error('Error clearing all data:', error);
return { success: false, error: error.message };
}
});
} }
function setupGeneralIpcHandlers() { function setupGeneralIpcHandlers() {
ipcMain.handle('get-app-version', async () => { ipcMain.handle("get-app-version", async () => {
return app.getVersion(); return app.getVersion();
}); });
ipcMain.handle('open-logs-folder', async () => { ipcMain.handle("quit-application", async (event) => {
try { try {
const logPath = getLogPath(); stopMacOSAudioCapture();
const logsDir = require('path').dirname(logPath); app.quit();
await shell.openPath(logsDir); return { success: true };
return { success: true, path: logsDir }; } catch (error) {
} catch (error) { console.error("Error quitting application:", error);
console.error('Error opening logs folder:', error); return { success: false, error: error.message };
return { success: false, error: error.message }; }
} });
});
ipcMain.handle('quit-application', async event => { ipcMain.handle("open-external", async (event, url) => {
try { try {
stopMacOSAudioCapture(); await shell.openExternal(url);
app.quit(); return { success: true };
return { success: true }; } catch (error) {
} catch (error) { console.error("Error opening external URL:", error);
console.error('Error quitting application:', error); return { success: false, error: error.message };
return { success: false, error: error.message }; }
} });
});
ipcMain.handle('open-external', async (event, url) => { ipcMain.on("update-keybinds", (event, newKeybinds) => {
try { if (mainWindow) {
await shell.openExternal(url); // Also save to storage
return { success: true }; storage.setKeybinds(newKeybinds);
} catch (error) { updateGlobalShortcuts(
console.error('Error opening external URL:', error); newKeybinds,
return { success: false, error: error.message }; mainWindow,
} sendToRenderer,
}); geminiSessionRef,
);
}
});
ipcMain.on('update-keybinds', (event, newKeybinds) => { // Debug logging from renderer
if (mainWindow) { ipcMain.on("log-message", (event, msg) => {
// Also save to storage console.log(msg);
storage.setKeybinds(newKeybinds); });
updateGlobalShortcuts(newKeybinds, mainWindow, sendToRenderer, geminiSessionRef);
}
});
// Debug logging from renderer
ipcMain.on('log-message', (event, msg) => {
console.log(msg);
});
} }

View File

@ -1,508 +1,574 @@
const fs = require('fs'); const fs = require("fs");
const path = require('path'); const path = require("path");
const os = require('os'); const os = require("os");
const CONFIG_VERSION = 1; const CONFIG_VERSION = 1;
// Default values // Default values
const DEFAULT_CONFIG = { const DEFAULT_CONFIG = {
configVersion: CONFIG_VERSION, configVersion: CONFIG_VERSION,
onboarded: false, onboarded: false,
layout: 'normal' layout: "normal",
}; };
const DEFAULT_CREDENTIALS = { const DEFAULT_CREDENTIALS = {
apiKey: '', apiKey: "",
// OpenAI Realtime API settings groqApiKey: "",
openaiApiKey: '', openaiCompatibleApiKey: "",
openaiBaseUrl: '', openaiCompatibleBaseUrl: "",
openaiModel: 'gpt-4o-realtime-preview-2024-12-17', openaiCompatibleModel: "",
// OpenAI SDK settings (for BotHub and other providers)
openaiSdkApiKey: '',
openaiSdkBaseUrl: '',
openaiSdkModel: 'gpt-4o',
openaiSdkVisionModel: 'gpt-4o',
openaiSdkWhisperModel: 'whisper-1'
}; };
const DEFAULT_PREFERENCES = { const DEFAULT_PREFERENCES = {
customPrompt: '', customPrompt: "",
selectedProfile: 'interview', selectedProfile: "interview",
selectedLanguage: 'en-US', selectedLanguage: "en-US",
selectedScreenshotInterval: '5', selectedScreenshotInterval: "5",
selectedImageQuality: 'medium', selectedImageQuality: "medium",
advancedMode: false, advancedMode: false,
audioMode: 'speaker_only', audioMode: "speaker_only",
fontSize: 'medium', fontSize: "medium",
backgroundTransparency: 0.8, backgroundTransparency: 0.8,
googleSearchEnabled: false, googleSearchEnabled: false,
aiProvider: 'gemini' responseProvider: "gemini",
ollamaHost: "http://127.0.0.1:11434",
ollamaModel: "llama3.1",
whisperModel: "Xenova/whisper-small",
whisperDevice: "", // '' = auto-detect, 'cpu' = native, 'wasm' = compatible
}; };
const DEFAULT_KEYBINDS = null; // null means use system defaults const DEFAULT_KEYBINDS = null; // null means use system defaults
const DEFAULT_LIMITS = { const DEFAULT_LIMITS = {
data: [] // Array of { date: 'YYYY-MM-DD', flash: { count: 0 }, flashLite: { count: 0 } } data: [], // Array of { date: 'YYYY-MM-DD', flash: { count }, flashLite: { count }, groq: { 'qwen3-32b': { chars, limit }, 'gpt-oss-120b': { chars, limit }, 'gpt-oss-20b': { chars, limit } }, gemini: { 'gemma-3-27b-it': { chars } } }
}; };
// Get the config directory path based on OS // Get the config directory path based on OS
function getConfigDir() { function getConfigDir() {
const platform = os.platform(); const platform = os.platform();
let configDir; let configDir;
if (platform === 'win32') { if (platform === "win32") {
configDir = path.join(os.homedir(), 'AppData', 'Roaming', 'cheating-daddy-config'); configDir = path.join(
} else if (platform === 'darwin') { os.homedir(),
configDir = path.join(os.homedir(), 'Library', 'Application Support', 'cheating-daddy-config'); "AppData",
} else { "Roaming",
configDir = path.join(os.homedir(), '.config', 'cheating-daddy-config'); "cheating-daddy-config",
} );
} else if (platform === "darwin") {
configDir = path.join(
os.homedir(),
"Library",
"Application Support",
"cheating-daddy-config",
);
} else {
configDir = path.join(os.homedir(), ".config", "cheating-daddy-config");
}
return configDir; return configDir;
} }
// File paths // File paths
function getConfigPath() { function getConfigPath() {
return path.join(getConfigDir(), 'config.json'); return path.join(getConfigDir(), "config.json");
} }
function getCredentialsPath() { function getCredentialsPath() {
return path.join(getConfigDir(), 'credentials.json'); return path.join(getConfigDir(), "credentials.json");
} }
function getPreferencesPath() { function getPreferencesPath() {
return path.join(getConfigDir(), 'preferences.json'); return path.join(getConfigDir(), "preferences.json");
} }
function getKeybindsPath() { function getKeybindsPath() {
return path.join(getConfigDir(), 'keybinds.json'); return path.join(getConfigDir(), "keybinds.json");
} }
function getLimitsPath() { function getLimitsPath() {
return path.join(getConfigDir(), 'limits.json'); return path.join(getConfigDir(), "limits.json");
} }
function getHistoryDir() { function getHistoryDir() {
return path.join(getConfigDir(), 'history'); return path.join(getConfigDir(), "history");
} }
// Helper to read JSON file safely // Helper to read JSON file safely
function readJsonFile(filePath, defaultValue) { function readJsonFile(filePath, defaultValue) {
try { try {
if (fs.existsSync(filePath)) { if (fs.existsSync(filePath)) {
const data = fs.readFileSync(filePath, 'utf8'); const data = fs.readFileSync(filePath, "utf8");
return JSON.parse(data); return JSON.parse(data);
}
} catch (error) {
console.warn(`Error reading ${filePath}:`, error.message);
} }
return defaultValue; } catch (error) {
console.warn(`Error reading ${filePath}:`, error.message);
}
return defaultValue;
} }
// Helper to write JSON file safely // Helper to write JSON file safely
function writeJsonFile(filePath, data) { function writeJsonFile(filePath, data) {
try { try {
const dir = path.dirname(filePath); const dir = path.dirname(filePath);
if (!fs.existsSync(dir)) { if (!fs.existsSync(dir)) {
fs.mkdirSync(dir, { recursive: true }); fs.mkdirSync(dir, { recursive: true });
}
fs.writeFileSync(filePath, JSON.stringify(data, null, 2), 'utf8');
return true;
} catch (error) {
console.error(`Error writing ${filePath}:`, error.message);
return false;
} }
fs.writeFileSync(filePath, JSON.stringify(data, null, 2), "utf8");
return true;
} catch (error) {
console.error(`Error writing ${filePath}:`, error.message);
return false;
}
} }
// Check if we need to reset (no configVersion or wrong version) // Check if we need to reset (no configVersion or wrong version)
function needsReset() { function needsReset() {
const configPath = getConfigPath(); const configPath = getConfigPath();
if (!fs.existsSync(configPath)) { if (!fs.existsSync(configPath)) {
return true; return true;
} }
try { try {
const config = JSON.parse(fs.readFileSync(configPath, 'utf8')); const config = JSON.parse(fs.readFileSync(configPath, "utf8"));
return !config.configVersion || config.configVersion !== CONFIG_VERSION; return !config.configVersion || config.configVersion !== CONFIG_VERSION;
} catch { } catch {
return true; return true;
} }
} }
// Wipe and reinitialize the config directory // Wipe and reinitialize the config directory
function resetConfigDir() { function resetConfigDir() {
const configDir = getConfigDir(); const configDir = getConfigDir();
console.log('Resetting config directory...'); console.log("Resetting config directory...");
// Remove existing directory if it exists // Remove existing directory if it exists
if (fs.existsSync(configDir)) { if (fs.existsSync(configDir)) {
fs.rmSync(configDir, { recursive: true, force: true }); fs.rmSync(configDir, { recursive: true, force: true });
} }
// Create fresh directory structure // Create fresh directory structure
fs.mkdirSync(configDir, { recursive: true }); fs.mkdirSync(configDir, { recursive: true });
fs.mkdirSync(getHistoryDir(), { recursive: true }); fs.mkdirSync(getHistoryDir(), { recursive: true });
// Initialize with defaults // Initialize with defaults
writeJsonFile(getConfigPath(), DEFAULT_CONFIG); writeJsonFile(getConfigPath(), DEFAULT_CONFIG);
writeJsonFile(getCredentialsPath(), DEFAULT_CREDENTIALS); writeJsonFile(getCredentialsPath(), DEFAULT_CREDENTIALS);
writeJsonFile(getPreferencesPath(), DEFAULT_PREFERENCES); writeJsonFile(getPreferencesPath(), DEFAULT_PREFERENCES);
console.log('Config directory initialized with defaults'); console.log("Config directory initialized with defaults");
} }
// Initialize storage - call this on app startup // Initialize storage - call this on app startup
function initializeStorage() { function initializeStorage() {
if (needsReset()) { if (needsReset()) {
resetConfigDir(); resetConfigDir();
} else { } else {
// Ensure history directory exists // Ensure history directory exists
const historyDir = getHistoryDir(); const historyDir = getHistoryDir();
if (!fs.existsSync(historyDir)) { if (!fs.existsSync(historyDir)) {
fs.mkdirSync(historyDir, { recursive: true }); fs.mkdirSync(historyDir, { recursive: true });
}
} }
}
} }
// ============ CONFIG ============ // ============ CONFIG ============
function getConfig() { function getConfig() {
return readJsonFile(getConfigPath(), DEFAULT_CONFIG); return readJsonFile(getConfigPath(), DEFAULT_CONFIG);
} }
function setConfig(config) { function setConfig(config) {
const current = getConfig(); const current = getConfig();
const updated = { ...current, ...config, configVersion: CONFIG_VERSION }; const updated = { ...current, ...config, configVersion: CONFIG_VERSION };
return writeJsonFile(getConfigPath(), updated); return writeJsonFile(getConfigPath(), updated);
} }
function updateConfig(key, value) { function updateConfig(key, value) {
const config = getConfig(); const config = getConfig();
config[key] = value; config[key] = value;
return writeJsonFile(getConfigPath(), config); return writeJsonFile(getConfigPath(), config);
} }
// ============ CREDENTIALS ============ // ============ CREDENTIALS ============
function getCredentials() { function getCredentials() {
return readJsonFile(getCredentialsPath(), DEFAULT_CREDENTIALS); return readJsonFile(getCredentialsPath(), DEFAULT_CREDENTIALS);
} }
function setCredentials(credentials) { function setCredentials(credentials) {
const current = getCredentials(); const current = getCredentials();
const updated = { ...current, ...credentials }; const updated = { ...current, ...credentials };
return writeJsonFile(getCredentialsPath(), updated); return writeJsonFile(getCredentialsPath(), updated);
} }
function getApiKey() { function getApiKey() {
return getCredentials().apiKey || ''; return getCredentials().apiKey || "";
} }
function setApiKey(apiKey) { function setApiKey(apiKey) {
return setCredentials({ apiKey }); return setCredentials({ apiKey });
} }
function getOpenAICredentials() { function getGroqApiKey() {
const creds = getCredentials(); return getCredentials().groqApiKey || "";
return {
apiKey: creds.openaiApiKey || '',
baseUrl: creds.openaiBaseUrl || '',
model: creds.openaiModel || 'gpt-4o-realtime-preview-2024-12-17'
};
} }
function setOpenAICredentials(config) { function setGroqApiKey(groqApiKey) {
const updates = {}; return setCredentials({ groqApiKey });
if (config.apiKey !== undefined) updates.openaiApiKey = config.apiKey;
if (config.baseUrl !== undefined) updates.openaiBaseUrl = config.baseUrl;
if (config.model !== undefined) updates.openaiModel = config.model;
return setCredentials(updates);
} }
function getOpenAISDKCredentials() { function getOpenAICompatibleConfig() {
const creds = getCredentials(); const creds = getCredentials();
return { return {
apiKey: creds.openaiSdkApiKey || '', apiKey: creds.openaiCompatibleApiKey || "",
baseUrl: creds.openaiSdkBaseUrl || '', baseUrl: creds.openaiCompatibleBaseUrl || "",
model: creds.openaiSdkModel || 'gpt-4o', model: creds.openaiCompatibleModel || "",
visionModel: creds.openaiSdkVisionModel || 'gpt-4o', };
whisperModel: creds.openaiSdkWhisperModel || 'whisper-1'
};
} }
function setOpenAISDKCredentials(config) { function setOpenAICompatibleConfig(apiKey, baseUrl, model) {
const updates = {}; return setCredentials({
if (config.apiKey !== undefined) updates.openaiSdkApiKey = config.apiKey; openaiCompatibleApiKey: apiKey,
if (config.baseUrl !== undefined) updates.openaiSdkBaseUrl = config.baseUrl; openaiCompatibleBaseUrl: baseUrl,
if (config.model !== undefined) updates.openaiSdkModel = config.model; openaiCompatibleModel: model,
if (config.visionModel !== undefined) updates.openaiSdkVisionModel = config.visionModel; });
if (config.whisperModel !== undefined) updates.openaiSdkWhisperModel = config.whisperModel;
return setCredentials(updates);
} }
// ============ PREFERENCES ============ // ============ PREFERENCES ============
function getPreferences() { function getPreferences() {
const saved = readJsonFile(getPreferencesPath(), {}); const saved = readJsonFile(getPreferencesPath(), {});
return { ...DEFAULT_PREFERENCES, ...saved }; return { ...DEFAULT_PREFERENCES, ...saved };
} }
function setPreferences(preferences) { function setPreferences(preferences) {
const current = getPreferences(); const current = getPreferences();
const updated = { ...current, ...preferences }; const updated = { ...current, ...preferences };
return writeJsonFile(getPreferencesPath(), updated); return writeJsonFile(getPreferencesPath(), updated);
} }
function updatePreference(key, value) { function updatePreference(key, value) {
const preferences = getPreferences(); const preferences = getPreferences();
preferences[key] = value; preferences[key] = value;
return writeJsonFile(getPreferencesPath(), preferences); return writeJsonFile(getPreferencesPath(), preferences);
} }
// ============ KEYBINDS ============ // ============ KEYBINDS ============
function getKeybinds() { function getKeybinds() {
return readJsonFile(getKeybindsPath(), DEFAULT_KEYBINDS); return readJsonFile(getKeybindsPath(), DEFAULT_KEYBINDS);
} }
function setKeybinds(keybinds) { function setKeybinds(keybinds) {
return writeJsonFile(getKeybindsPath(), keybinds); return writeJsonFile(getKeybindsPath(), keybinds);
} }
// ============ LIMITS (Rate Limiting) ============ // ============ LIMITS (Rate Limiting) ============
function getLimits() { function getLimits() {
return readJsonFile(getLimitsPath(), DEFAULT_LIMITS); return readJsonFile(getLimitsPath(), DEFAULT_LIMITS);
} }
function setLimits(limits) { function setLimits(limits) {
return writeJsonFile(getLimitsPath(), limits); return writeJsonFile(getLimitsPath(), limits);
} }
function getTodayDateString() { function getTodayDateString() {
const now = new Date(); const now = new Date();
return now.toISOString().split('T')[0]; // YYYY-MM-DD return now.toISOString().split("T")[0]; // YYYY-MM-DD
} }
function getTodayLimits() { function getTodayLimits() {
const limits = getLimits(); const limits = getLimits();
const today = getTodayDateString(); const today = getTodayDateString();
// Find today's entry // Find today's entry
const todayEntry = limits.data.find(entry => entry.date === today); const todayEntry = limits.data.find((entry) => entry.date === today);
if (todayEntry) { if (todayEntry) {
return todayEntry; // ensure new fields exist
if (!todayEntry.groq) {
todayEntry.groq = {
"qwen3-32b": { chars: 0, limit: 1500000 },
"gpt-oss-120b": { chars: 0, limit: 600000 },
"gpt-oss-20b": { chars: 0, limit: 600000 },
"kimi-k2-instruct": { chars: 0, limit: 600000 },
};
}
if (!todayEntry.gemini) {
todayEntry.gemini = {
"gemma-3-27b-it": { chars: 0 },
};
} }
// No entry for today - clean old entries and create new one
limits.data = limits.data.filter(entry => entry.date === today);
const newEntry = {
date: today,
flash: { count: 0 },
flashLite: { count: 0 }
};
limits.data.push(newEntry);
setLimits(limits); setLimits(limits);
return todayEntry;
}
return newEntry; // No entry for today - clean old entries and create new one
limits.data = limits.data.filter((entry) => entry.date === today);
const newEntry = {
date: today,
flash: { count: 0 },
flashLite: { count: 0 },
groq: {
"qwen3-32b": { chars: 0, limit: 1500000 },
"gpt-oss-120b": { chars: 0, limit: 600000 },
"gpt-oss-20b": { chars: 0, limit: 600000 },
"kimi-k2-instruct": { chars: 0, limit: 600000 },
},
gemini: {
"gemma-3-27b-it": { chars: 0 },
},
};
limits.data.push(newEntry);
setLimits(limits);
return newEntry;
} }
function incrementLimitCount(model) { function incrementLimitCount(model) {
const limits = getLimits(); const limits = getLimits();
const today = getTodayDateString(); const today = getTodayDateString();
// Find or create today's entry // Find or create today's entry
let todayEntry = limits.data.find(entry => entry.date === today); let todayEntry = limits.data.find((entry) => entry.date === today);
if (!todayEntry) { if (!todayEntry) {
// Clean old entries and create new one // Clean old entries and create new one
limits.data = []; limits.data = [];
todayEntry = { todayEntry = {
date: today, date: today,
flash: { count: 0 }, flash: { count: 0 },
flashLite: { count: 0 } flashLite: { count: 0 },
}; };
limits.data.push(todayEntry); limits.data.push(todayEntry);
} else { } else {
// Clean old entries, keep only today // Clean old entries, keep only today
limits.data = limits.data.filter(entry => entry.date === today); limits.data = limits.data.filter((entry) => entry.date === today);
} }
// Increment the appropriate model count // Increment the appropriate model count
if (model === 'gemini-2.5-flash') { if (model === "gemini-2.5-flash") {
todayEntry.flash.count++; todayEntry.flash.count++;
} else if (model === 'gemini-2.5-flash-lite') { } else if (model === "gemini-2.5-flash-lite") {
todayEntry.flashLite.count++; todayEntry.flashLite.count++;
} }
setLimits(limits);
return todayEntry;
}
function incrementCharUsage(provider, model, charCount) {
getTodayLimits();
const limits = getLimits();
const today = getTodayDateString();
const todayEntry = limits.data.find((entry) => entry.date === today);
if (todayEntry[provider] && todayEntry[provider][model]) {
todayEntry[provider][model].chars += charCount;
setLimits(limits); setLimits(limits);
return todayEntry; }
return todayEntry;
} }
function getAvailableModel() { function getAvailableModel() {
const todayLimits = getTodayLimits(); const todayLimits = getTodayLimits();
// RPD limits: flash = 20, flash-lite = 20 // RPD limits: flash = 20, flash-lite = 20
// After both exhausted, fall back to flash (for paid API users) // After both exhausted, fall back to flash (for paid API users)
if (todayLimits.flash.count < 20) { if (todayLimits.flash.count < 20) {
return 'gemini-2.5-flash'; return "gemini-2.5-flash";
} else if (todayLimits.flashLite.count < 20) { } else if (todayLimits.flashLite.count < 20) {
return 'gemini-2.5-flash-lite'; return "gemini-2.5-flash-lite";
} }
return 'gemini-2.5-flash'; // Default to flash for paid API users return "gemini-2.5-flash"; // Default to flash for paid API users
}
function getModelForToday() {
const todayEntry = getTodayLimits();
const groq = todayEntry.groq;
if (groq["qwen3-32b"].chars < groq["qwen3-32b"].limit) {
return "qwen/qwen3-32b";
}
if (groq["gpt-oss-120b"].chars < groq["gpt-oss-120b"].limit) {
return "openai/gpt-oss-120b";
}
if (groq["gpt-oss-20b"].chars < groq["gpt-oss-20b"].limit) {
return "openai/gpt-oss-20b";
}
if (groq["kimi-k2-instruct"].chars < groq["kimi-k2-instruct"].limit) {
return "moonshotai/kimi-k2-instruct";
}
// All limits exhausted
return null;
} }
// ============ HISTORY ============ // ============ HISTORY ============
function getSessionPath(sessionId) { function getSessionPath(sessionId) {
return path.join(getHistoryDir(), `${sessionId}.json`); return path.join(getHistoryDir(), `${sessionId}.json`);
} }
function saveSession(sessionId, data) { function saveSession(sessionId, data) {
const sessionPath = getSessionPath(sessionId); const sessionPath = getSessionPath(sessionId);
// Load existing session to preserve metadata // Load existing session to preserve metadata
const existingSession = readJsonFile(sessionPath, null); const existingSession = readJsonFile(sessionPath, null);
const sessionData = { const sessionData = {
sessionId, sessionId,
createdAt: existingSession?.createdAt || parseInt(sessionId), createdAt: existingSession?.createdAt || parseInt(sessionId),
lastUpdated: Date.now(), lastUpdated: Date.now(),
// Profile context - set once when session starts // Profile context - set once when session starts
profile: data.profile || existingSession?.profile || null, profile: data.profile || existingSession?.profile || null,
customPrompt: data.customPrompt || existingSession?.customPrompt || null, customPrompt: data.customPrompt || existingSession?.customPrompt || null,
// Conversation data // Conversation data
conversationHistory: data.conversationHistory || existingSession?.conversationHistory || [], conversationHistory:
screenAnalysisHistory: data.screenAnalysisHistory || existingSession?.screenAnalysisHistory || [] data.conversationHistory || existingSession?.conversationHistory || [],
}; screenAnalysisHistory:
return writeJsonFile(sessionPath, sessionData); data.screenAnalysisHistory ||
existingSession?.screenAnalysisHistory ||
[],
};
return writeJsonFile(sessionPath, sessionData);
} }
function getSession(sessionId) { function getSession(sessionId) {
return readJsonFile(getSessionPath(sessionId), null); return readJsonFile(getSessionPath(sessionId), null);
} }
function getAllSessions() { function getAllSessions() {
const historyDir = getHistoryDir(); const historyDir = getHistoryDir();
try { try {
if (!fs.existsSync(historyDir)) { if (!fs.existsSync(historyDir)) {
return []; return [];
}
const files = fs.readdirSync(historyDir)
.filter(f => f.endsWith('.json'))
.sort((a, b) => {
// Sort by timestamp descending (newest first)
const tsA = parseInt(a.replace('.json', ''));
const tsB = parseInt(b.replace('.json', ''));
return tsB - tsA;
});
return files.map(file => {
const sessionId = file.replace('.json', '');
const data = readJsonFile(path.join(historyDir, file), null);
if (data) {
return {
sessionId,
createdAt: data.createdAt,
lastUpdated: data.lastUpdated,
messageCount: data.conversationHistory?.length || 0,
screenAnalysisCount: data.screenAnalysisHistory?.length || 0,
profile: data.profile || null,
customPrompt: data.customPrompt || null
};
}
return null;
}).filter(Boolean);
} catch (error) {
console.error('Error reading sessions:', error.message);
return [];
} }
const files = fs
.readdirSync(historyDir)
.filter((f) => f.endsWith(".json"))
.sort((a, b) => {
// Sort by timestamp descending (newest first)
const tsA = parseInt(a.replace(".json", ""));
const tsB = parseInt(b.replace(".json", ""));
return tsB - tsA;
});
return files
.map((file) => {
const sessionId = file.replace(".json", "");
const data = readJsonFile(path.join(historyDir, file), null);
if (data) {
return {
sessionId,
createdAt: data.createdAt,
lastUpdated: data.lastUpdated,
messageCount: data.conversationHistory?.length || 0,
screenAnalysisCount: data.screenAnalysisHistory?.length || 0,
profile: data.profile || null,
customPrompt: data.customPrompt || null,
};
}
return null;
})
.filter(Boolean);
} catch (error) {
console.error("Error reading sessions:", error.message);
return [];
}
} }
function deleteSession(sessionId) { function deleteSession(sessionId) {
const sessionPath = getSessionPath(sessionId); const sessionPath = getSessionPath(sessionId);
try { try {
if (fs.existsSync(sessionPath)) { if (fs.existsSync(sessionPath)) {
fs.unlinkSync(sessionPath); fs.unlinkSync(sessionPath);
return true; return true;
}
} catch (error) {
console.error('Error deleting session:', error.message);
} }
return false; } catch (error) {
console.error("Error deleting session:", error.message);
}
return false;
} }
function deleteAllSessions() { function deleteAllSessions() {
const historyDir = getHistoryDir(); const historyDir = getHistoryDir();
try { try {
if (fs.existsSync(historyDir)) { if (fs.existsSync(historyDir)) {
const files = fs.readdirSync(historyDir).filter(f => f.endsWith('.json')); const files = fs
files.forEach(file => { .readdirSync(historyDir)
fs.unlinkSync(path.join(historyDir, file)); .filter((f) => f.endsWith(".json"));
}); files.forEach((file) => {
} fs.unlinkSync(path.join(historyDir, file));
return true; });
} catch (error) {
console.error('Error deleting all sessions:', error.message);
return false;
} }
return true;
} catch (error) {
console.error("Error deleting all sessions:", error.message);
return false;
}
} }
// ============ CLEAR ALL DATA ============ // ============ CLEAR ALL DATA ============
function clearAllData() { function clearAllData() {
resetConfigDir(); resetConfigDir();
return true; return true;
} }
module.exports = { module.exports = {
// Initialization // Initialization
initializeStorage, initializeStorage,
getConfigDir, getConfigDir,
// Config // Config
getConfig, getConfig,
setConfig, setConfig,
updateConfig, updateConfig,
// Credentials // Credentials
getCredentials, getCredentials,
setCredentials, setCredentials,
getApiKey, getApiKey,
setApiKey, setApiKey,
getOpenAICredentials, getGroqApiKey,
setOpenAICredentials, setGroqApiKey,
getOpenAISDKCredentials, getOpenAICompatibleConfig,
setOpenAISDKCredentials, setOpenAICompatibleConfig,
// Preferences // Preferences
getPreferences, getPreferences,
setPreferences, setPreferences,
updatePreference, updatePreference,
// Keybinds // Keybinds
getKeybinds, getKeybinds,
setKeybinds, setKeybinds,
// Limits (Rate Limiting) // Limits (Rate Limiting)
getLimits, getLimits,
setLimits, setLimits,
getTodayLimits, getTodayLimits,
incrementLimitCount, incrementLimitCount,
getAvailableModel, getAvailableModel,
incrementCharUsage,
getModelForToday,
// History // History
saveSession, saveSession,
getSession, getSession,
getAllSessions, getAllSessions,
deleteSession, deleteSession,
deleteAllSessions, deleteAllSessions,
// Clear all // Clear all
clearAllData clearAllData,
}; };

View File

@ -1,453 +0,0 @@
const { BrowserWindow, ipcMain } = require('electron');
const { getSystemPrompt } = require('./prompts');
const { getAvailableModel, incrementLimitCount, getApiKey, getOpenAICredentials, getOpenAISDKCredentials, getPreferences } = require('../storage');
// Import provider implementations
const geminiProvider = require('./gemini');
const openaiRealtimeProvider = require('./openai-realtime');
const openaiSdkProvider = require('./openai-sdk');
// Conversation tracking (shared across providers)
let currentSessionId = null;
let conversationHistory = [];
let screenAnalysisHistory = [];
let currentProfile = null;
let currentCustomPrompt = null;
let currentProvider = 'gemini'; // 'gemini', 'openai-realtime', or 'openai-sdk'
let providerConfig = {};
/**
 * Forward an IPC message to the first open renderer window.
 * Silently does nothing when no windows exist (e.g. during shutdown).
 *
 * @param {string} channel - IPC channel name.
 * @param {*} data - Payload to send over the channel.
 */
function sendToRenderer(channel, data) {
    const [targetWindow] = BrowserWindow.getAllWindows();
    if (!targetWindow) {
        return;
    }
    targetWindow.webContents.send(channel, data);
}
/**
 * Reset all module-level conversation state and begin a fresh session.
 * The session id is the millisecond timestamp at creation time.
 *
 * @param {string|null} profile - Profile context for the session, if any.
 * @param {string|null} customPrompt - Custom prompt text, if any.
 */
function initializeNewSession(profile = null, customPrompt = null) {
    currentSessionId = Date.now().toString();
    conversationHistory = [];
    screenAnalysisHistory = [];
    currentProfile = profile;
    currentCustomPrompt = customPrompt;

    console.log(
        'New conversation session started:',
        currentSessionId,
        'profile:',
        profile,
        'provider:',
        currentProvider,
    );

    // Only notify the renderer when a profile was supplied.
    if (!profile) {
        return;
    }
    sendToRenderer('save-session-context', {
        sessionId: currentSessionId,
        profile,
        customPrompt: customPrompt || '',
        provider: currentProvider,
    });
}
/**
 * Append one transcription/response pair to the in-memory conversation
 * history and push the updated history to the renderer for persistence.
 * Lazily starts a session if none is active.
 *
 * @param {string} transcription - Raw transcription text (will be trimmed).
 * @param {string} aiResponse - Raw AI response text (will be trimmed).
 */
function saveConversationTurn(transcription, aiResponse) {
    if (!currentSessionId) {
        initializeNewSession();
    }

    const turn = {
        timestamp: Date.now(),
        transcription: transcription.trim(),
        ai_response: aiResponse.trim(),
    };
    conversationHistory.push(turn);
    console.log('Saved conversation turn:', turn);

    sendToRenderer('save-conversation-turn', {
        sessionId: currentSessionId,
        turn,
        fullHistory: conversationHistory,
    });
}
/**
 * Record one screen-analysis result (prompt + model response) in the
 * in-memory history and forward the updated history to the renderer.
 * Lazily starts a session if none is active.
 *
 * @param {string} prompt - The prompt sent with the screenshot.
 * @param {string} response - Model response text (will be trimmed).
 * @param {string} model - Identifier of the model that produced the response.
 */
function saveScreenAnalysis(prompt, response, model) {
    if (!currentSessionId) {
        initializeNewSession();
    }

    const entry = {
        timestamp: Date.now(),
        prompt: prompt,
        response: response.trim(),
        model: model,
        provider: currentProvider,
    };
    screenAnalysisHistory.push(entry);
    console.log('Saved screen analysis:', entry);

    sendToRenderer('save-screen-analysis', {
        sessionId: currentSessionId,
        analysis: entry,
        fullHistory: screenAnalysisHistory,
        profile: currentProfile,
        customPrompt: currentCustomPrompt,
    });
}
/**
 * Snapshot the current session's identifier, conversation history,
 * and active provider name.
 *
 * @returns {{sessionId: string|null, history: Array, provider: string}}
 */
function getCurrentSessionData() {
    const snapshot = {
        sessionId: currentSessionId,
        history: conversationHistory,
        provider: currentProvider,
    };
    return snapshot;
}
// Get provider configuration from storage
/**
 * Read a setting from the renderer's localStorage by executing a small
 * script in the first open window. Falls back to `defaultValue` when no
 * window exists, localStorage is unavailable, or the key is unset/empty.
 *
 * FIX: `key` and `defaultValue` were previously spliced raw into the
 * injected script; any quote, backslash, or newline in either would break
 * the script (and was an injection vector). They are now embedded as
 * proper JS string literals via JSON.stringify.
 *
 * @param {string} key - localStorage key to read.
 * @param {string} defaultValue - Value returned when the key is unavailable.
 * @returns {Promise<string>} The stored value or the default.
 */
async function getStoredSetting(key, defaultValue) {
    try {
        const windows = BrowserWindow.getAllWindows();
        if (windows.length > 0) {
            // Brief delay to let the renderer finish loading before querying it.
            await new Promise((resolve) => setTimeout(resolve, 100));
            // Encode as JS string literals so special characters cannot break
            // or alter the injected script.
            const keyLiteral = JSON.stringify(String(key));
            const defaultLiteral = JSON.stringify(String(defaultValue));
            const value = await windows[0].webContents.executeJavaScript(`
                (function() {
                    try {
                        if (typeof localStorage === 'undefined') {
                            return ${defaultLiteral};
                        }
                        const stored = localStorage.getItem(${keyLiteral});
                        return stored || ${defaultLiteral};
                    } catch (e) {
                        return ${defaultLiteral};
                    }
                })()
            `);
            return value;
        }
    } catch (error) {
        console.error('Error getting stored setting for', key, ':', error.message);
    }
    return defaultValue;
}
// Initialize AI session based on selected provider
/**
 * Initialize an AI session using the provider selected in preferences.json
 * ('gemini' by default, or 'openai-realtime' / 'openai-sdk').
 *
 * Side effects: mutates module-level `currentProvider` and `providerConfig`,
 * starts a new conversation session, and reports status to the renderer.
 *
 * @param {string} customPrompt - Extra prompt text merged into the system prompt.
 * @param {string} profile - Profile name used to build the system prompt.
 * @param {string} language - BCP-47 language tag passed to realtime providers.
 * @returns {Promise<boolean>} true when the session was established.
 */
async function initializeAISession(customPrompt = '', profile = 'interview', language = 'en-US') {
    // Read provider from file-based storage (preferences.json)
    const prefs = getPreferences();
    const provider = prefs.aiProvider || 'gemini';
    currentProvider = provider;
    console.log('Initializing AI session with provider:', provider);
    // Check if Google Search is enabled for system prompt
    // NOTE(review): falls back to true here, while DEFAULT_PREFERENCES elsewhere
    // uses false — getPreferences() merges defaults so this fallback likely never
    // triggers, but confirm the intended default.
    const googleSearchEnabled = prefs.googleSearchEnabled ?? true;
    const systemPrompt = getSystemPrompt(profile, customPrompt, googleSearchEnabled);
    if (provider === 'openai-realtime') {
        // Get OpenAI Realtime configuration
        const creds = getOpenAICredentials();
        if (!creds.apiKey) {
            sendToRenderer('update-status', 'OpenAI API key not configured');
            return false;
        }
        providerConfig = {
            apiKey: creds.apiKey,
            baseUrl: creds.baseUrl || null,
            model: creds.model,
            systemPrompt,
            language,
            isReconnect: false,
        };
        // Reset conversation state before connecting.
        initializeNewSession(profile, customPrompt);
        try {
            await openaiRealtimeProvider.initializeOpenAISession(providerConfig, conversationHistory);
            return true;
        } catch (error) {
            console.error('Failed to initialize OpenAI Realtime session:', error);
            sendToRenderer('update-status', 'Failed to connect to OpenAI Realtime');
            return false;
        }
    } else if (provider === 'openai-sdk') {
        // Get OpenAI SDK configuration (for BotHub, etc.)
        const creds = getOpenAISDKCredentials();
        if (!creds.apiKey) {
            sendToRenderer('update-status', 'OpenAI SDK API key not configured');
            return false;
        }
        providerConfig = {
            apiKey: creds.apiKey,
            baseUrl: creds.baseUrl || null,
            model: creds.model,
            visionModel: creds.visionModel,
            whisperModel: creds.whisperModel,
        };
        initializeNewSession(profile, customPrompt);
        try {
            await openaiSdkProvider.initializeOpenAISDK(providerConfig);
            // System prompt is applied separately after SDK initialization.
            openaiSdkProvider.setSystemPrompt(systemPrompt);
            sendToRenderer('update-status', 'Ready (OpenAI SDK)');
            return true;
        } catch (error) {
            console.error('Failed to initialize OpenAI SDK:', error);
            sendToRenderer('update-status', 'Failed to initialize OpenAI SDK: ' + error.message);
            return false;
        }
    } else {
        // Use Gemini (default)
        const apiKey = getApiKey();
        if (!apiKey) {
            sendToRenderer('update-status', 'Gemini API key not configured');
            return false;
        }
        const session = await geminiProvider.initializeGeminiSession(apiKey, customPrompt, profile, language);
        // Store the live session on the shared global ref so other modules can send to it.
        if (session && global.geminiSessionRef) {
            global.geminiSessionRef.current = session;
            return true;
        }
        return false;
    }
}
// Send audio to appropriate provider
/**
 * Route one audio chunk to the active provider.
 * @param {string} data - audio payload (provider-specific encoding)
 * @param {string} mimeType - MIME type of the chunk
 * @param {boolean} isSystemAudio - true for system audio, false for mic
 * @returns {Promise<{success: boolean, error?: string}>}
 */
async function sendAudioContent(data, mimeType, isSystemAudio = true) {
    if (currentProvider === 'openai-realtime') {
        return await openaiRealtimeProvider.sendAudioToOpenAI(data);
    }
    if (currentProvider === 'openai-sdk') {
        // The SDK provider buffers chunks and transcribes them on flush.
        return await openaiSdkProvider.processAudioChunk(data, mimeType);
    }
    // Gemini live session path.
    const session = global.geminiSessionRef?.current;
    if (!session) {
        return { success: false, error: 'No active Gemini session' };
    }
    try {
        // Console heartbeat: '.' marks system audio, ',' marks mic audio.
        process.stdout.write(isSystemAudio ? '.' : ',');
        await session.sendRealtimeInput({ audio: { data, mimeType } });
        return { success: true };
    } catch (error) {
        console.error('Error sending audio to Gemini:', error);
        return { success: false, error: error.message };
    }
}
// Send image to appropriate provider
/**
 * Route a screenshot plus prompt to the active provider and persist the
 * resulting screen analysis on success.
 * @returns {Promise<{success: boolean, text?: string, model?: string, error?: string}>}
 */
async function sendImageContent(data, prompt) {
    if (currentProvider === 'openai-realtime') {
        const creds = getOpenAICredentials();
        const result = await openaiRealtimeProvider.sendImageToOpenAI(data, prompt, {
            apiKey: creds.apiKey,
            baseUrl: creds.baseUrl,
            model: creds.model,
        });
        if (result.success) {
            saveScreenAnalysis(prompt, result.text, result.model);
        }
        return result;
    }
    if (currentProvider === 'openai-sdk') {
        const result = await openaiSdkProvider.sendImageMessage(data, prompt);
        if (result.success) {
            saveScreenAnalysis(prompt, result.text, result.model);
        }
        return result;
    }
    // Gemini HTTP API path — sendImageToGeminiHttp persists the analysis itself.
    return await geminiProvider.sendImageToGeminiHttp(data, prompt);
}
// Send text message to appropriate provider
/**
 * Route a typed text message to the active provider.
 * @returns {Promise<{success: boolean, text?: string, error?: string}>}
 */
async function sendTextMessage(text) {
    if (currentProvider === 'openai-realtime') {
        return await openaiRealtimeProvider.sendTextToOpenAI(text);
    }
    if (currentProvider === 'openai-sdk') {
        const result = await openaiSdkProvider.sendTextMessage(text);
        // Persist the exchange only when a reply actually came back.
        if (result.success && result.text) {
            saveConversationTurn(text, result.text);
        }
        return result;
    }
    // Gemini live session path.
    const session = global.geminiSessionRef?.current;
    if (!session) {
        return { success: false, error: 'No active Gemini session' };
    }
    try {
        console.log('Sending text message to Gemini:', text);
        await session.sendRealtimeInput({ text: text.trim() });
        return { success: true };
    } catch (error) {
        console.error('Error sending text to Gemini:', error);
        return { success: false, error: error.message };
    }
}
// Close session for appropriate provider
/**
 * Tear down whichever provider session is currently active.
 * @returns {Promise<{success: boolean, error?: string}>}
 */
async function closeSession() {
    try {
        switch (currentProvider) {
            case 'openai-realtime':
                openaiRealtimeProvider.closeOpenAISession();
                break;
            case 'openai-sdk':
                openaiSdkProvider.closeOpenAISDK();
                break;
            default: {
                // Gemini: stop native audio capture, then close the live session.
                geminiProvider.stopMacOSAudioCapture();
                const session = global.geminiSessionRef?.current;
                if (session) {
                    await session.close();
                    global.geminiSessionRef.current = null;
                }
            }
        }
        return { success: true };
    } catch (error) {
        console.error('Error closing session:', error);
        return { success: false, error: error.message };
    }
}
// Setup IPC handlers
/**
 * Register all renderer-facing IPC channels for the AI provider layer.
 * @param {{current: object|null}} geminiSessionRef - mutable ref that holds
 *   the live Gemini session; stored on `global` so the dispatch helpers in
 *   this module can reach it without threading it through every call.
 */
function setupAIProviderIpcHandlers(geminiSessionRef) {
    // Store reference for Gemini
    global.geminiSessionRef = geminiSessionRef;
    // Listen for conversation turn save requests from providers
    ipcMain.on('save-conversation-turn-data', (event, { transcription, response }) => {
        saveConversationTurn(transcription, response);
    });
    ipcMain.handle('initialize-ai-session', async (event, customPrompt, profile, language) => {
        return await initializeAISession(customPrompt, profile, language);
    });
    // System audio chunks (isSystemAudio = true).
    ipcMain.handle('send-audio-content', async (event, { data, mimeType }) => {
        return await sendAudioContent(data, mimeType, true);
    });
    // Microphone audio chunks (isSystemAudio = false).
    ipcMain.handle('send-mic-audio-content', async (event, { data, mimeType }) => {
        return await sendAudioContent(data, mimeType, false);
    });
    ipcMain.handle('send-image-content', async (event, { data, prompt }) => {
        return await sendImageContent(data, prompt);
    });
    ipcMain.handle('send-text-message', async (event, text) => {
        return await sendTextMessage(text);
    });
    ipcMain.handle('close-session', async event => {
        return await closeSession();
    });
    // macOS system audio
    ipcMain.handle('start-macos-audio', async event => {
        if (process.platform !== 'darwin') {
            return {
                success: false,
                error: 'macOS audio capture only available on macOS',
            };
        }
        try {
            // Capture strategy depends on the active provider.
            if (currentProvider === 'gemini') {
                const success = await geminiProvider.startMacOSAudioCapture(global.geminiSessionRef);
                return { success };
            } else if (currentProvider === 'openai-sdk') {
                const success = await openaiSdkProvider.startMacOSAudioCapture();
                return { success };
            } else if (currentProvider === 'openai-realtime') {
                // OpenAI Realtime uses WebSocket, handle differently if needed
                return {
                    success: false,
                    error: 'OpenAI Realtime uses WebSocket for audio',
                };
            }
            // currentProvider was never set or is unrecognized.
            return {
                success: false,
                error: 'Unknown provider: ' + currentProvider,
            };
        } catch (error) {
            console.error('Error starting macOS audio capture:', error);
            return { success: false, error: error.message };
        }
    });
    ipcMain.handle('stop-macos-audio', async event => {
        try {
            // openai-realtime never started native capture, so no branch for it.
            if (currentProvider === 'gemini') {
                geminiProvider.stopMacOSAudioCapture();
            } else if (currentProvider === 'openai-sdk') {
                openaiSdkProvider.stopMacOSAudioCapture();
            }
            return { success: true };
        } catch (error) {
            console.error('Error stopping macOS audio capture:', error);
            return { success: false, error: error.message };
        }
    });
    // Session management
    ipcMain.handle('get-current-session', async event => {
        try {
            return { success: true, data: getCurrentSessionData() };
        } catch (error) {
            console.error('Error getting current session:', error);
            return { success: false, error: error.message };
        }
    });
    ipcMain.handle('start-new-session', async event => {
        try {
            // Called without args — uses initializeNewSession's defaults.
            initializeNewSession();
            return { success: true, sessionId: currentSessionId };
        } catch (error) {
            console.error('Error starting new session:', error);
            return { success: false, error: error.message };
        }
    });
    // NOTE(review): this handler only logs — the setting is actually read from
    // preferences at session init time; confirm persistence happens elsewhere.
    ipcMain.handle('update-google-search-setting', async (event, enabled) => {
        try {
            console.log('Google Search setting updated to:', enabled);
            return { success: true };
        } catch (error) {
            console.error('Error updating Google Search setting:', error);
            return { success: false, error: error.message };
        }
    });
    // Provider switching
    // NOTE(review): switches the in-memory provider only; it does not close an
    // already-open session for the previous provider — verify callers do.
    ipcMain.handle('switch-ai-provider', async (event, provider) => {
        try {
            console.log('Switching AI provider to:', provider);
            currentProvider = provider;
            return { success: true };
        } catch (error) {
            console.error('Error switching provider:', error);
            return { success: false, error: error.message };
        }
    });
}
// Public API: IPC wiring plus direct helpers usable from the main process.
module.exports = {
    setupAIProviderIpcHandlers,
    initializeAISession,
    sendAudioContent,
    sendImageContent,
    sendTextMessage,
    closeSession,
    getCurrentSessionData,
    initializeNewSession,
    saveConversationTurn,
};

File diff suppressed because it is too large Load Diff

854
src/utils/localai.js Normal file
View File

@ -0,0 +1,854 @@
const { Ollama } = require("ollama");
const { getSystemPrompt } = require("./prompts");
const {
sendToRenderer,
initializeNewSession,
saveConversationTurn,
} = require("./gemini");
const { fork } = require("child_process");
const path = require("path");
const { getSystemNode } = require("./nodeDetect");
// ── State ──
// Ollama chat client/model for the current local session.
let ollamaClient = null;
let ollamaModel = null;
// Whisper worker child process and its lifecycle flags.
let whisperWorker = null;
let isWhisperLoading = false;
let whisperReady = false;
// Rolling chat history (user/assistant messages only; system prompt is
// prepended at request time).
let localConversationHistory = [];
let currentSystemPrompt = null;
let isLocalActive = false;
// Set when we intentionally kill the worker to suppress crash handling
let whisperShuttingDown = false;
// Pending transcription callback (one at a time)
let pendingTranscribe = null;
// VAD state
let isSpeaking = false;
let speechBuffers = [];
let silenceFrameCount = 0;
let speechFrameCount = 0;
// VAD configuration — higher energyThreshold / lower silence frames means
// more aggressive segmentation (shorter utterances, faster turnaround).
const VAD_MODES = {
  NORMAL: {
    energyThreshold: 0.01,
    speechFramesRequired: 3,
    silenceFramesRequired: 30,
  },
  LOW_BITRATE: {
    energyThreshold: 0.008,
    speechFramesRequired: 4,
    silenceFramesRequired: 35,
  },
  AGGRESSIVE: {
    energyThreshold: 0.015,
    speechFramesRequired: 2,
    silenceFramesRequired: 20,
  },
  VERY_AGGRESSIVE: {
    energyThreshold: 0.02,
    speechFramesRequired: 2,
    silenceFramesRequired: 15,
  },
};
let vadConfig = VAD_MODES.VERY_AGGRESSIVE;
// Maximum speech buffer size: ~30 seconds at 16kHz, 16-bit mono
const MAX_SPEECH_BUFFER_BYTES = 16000 * 2 * 30; // 960,000 bytes
// Audio resampling buffer
let resampleRemainder = Buffer.alloc(0);
// ── Audio Resampling (24kHz → 16kHz) ──
/**
 * Linearly resample 16-bit LE mono PCM from 24 kHz down to 16 kHz.
 * Stateful: input that doesn't divide evenly into the 3:2 ratio is carried
 * over in `resampleRemainder` and prepended to the next call, so chunked
 * streaming input resamples without seams.
 * @param {Buffer} inputBuffer - 24 kHz 16-bit LE mono PCM chunk
 * @returns {Buffer} 16 kHz 16-bit LE mono PCM (may be empty for tiny inputs)
 */
function resample24kTo16k(inputBuffer) {
  // Combine with any leftover samples from previous call
  const combined = Buffer.concat([resampleRemainder, inputBuffer]);
  const inputSamples = Math.floor(combined.length / 2); // 16-bit = 2 bytes per sample
  // Ratio: 16000/24000 = 2/3, so for every 3 input samples we produce 2 output samples
  const outputSamples = Math.floor((inputSamples * 2) / 3);
  const outputBuffer = Buffer.alloc(outputSamples * 2);
  for (let i = 0; i < outputSamples; i++) {
    // Map output sample index to input position
    const srcPos = (i * 3) / 2;
    const srcIndex = Math.floor(srcPos);
    const frac = srcPos - srcIndex;
    const s0 = combined.readInt16LE(srcIndex * 2);
    // Clamp to the last sample at the buffer edge instead of reading past it.
    const s1 =
      srcIndex + 1 < inputSamples
        ? combined.readInt16LE((srcIndex + 1) * 2)
        : s0;
    const interpolated = Math.round(s0 + frac * (s1 - s0));
    // Clamp to int16 range before writing.
    outputBuffer.writeInt16LE(
      Math.max(-32768, Math.min(32767, interpolated)),
      i * 2,
    );
  }
  // Store remainder for next call
  const consumedInputSamples = Math.ceil((outputSamples * 3) / 2);
  const remainderStart = consumedInputSamples * 2;
  resampleRemainder =
    remainderStart < combined.length
      ? combined.slice(remainderStart)
      : Buffer.alloc(0);
  return outputBuffer;
}
// ── VAD (Voice Activity Detection) ──
/**
 * Root-mean-square level of a 16-bit LE mono PCM buffer, normalized to
 * [0, 1] by treating full-scale int16 (32768) as 1.0.
 * Returns 0 for an empty buffer.
 */
function calculateRMS(pcm16Buffer) {
  const samples = pcm16Buffer.length / 2;
  if (samples === 0) return 0;
  let energy = 0;
  let index = 0;
  while (index < samples) {
    const normalized = pcm16Buffer.readInt16LE(index * 2) / 32768;
    energy += normalized * normalized;
    index += 1;
  }
  return Math.sqrt(energy / samples);
}
/**
 * Energy-based voice activity detection over one 16 kHz PCM frame.
 * Tracks speech/silence runs via module state; when a speech segment ends
 * (enough consecutive silent frames) or the buffer cap is hit, the
 * accumulated audio is handed to handleSpeechEnd() for transcription.
 * NOTE(review): speechBuffers is reset when speech *starts*, so the first
 * `speechFramesRequired - 1` voiced frames are not included in the segment —
 * confirm this leading clipping is acceptable.
 */
function processVAD(pcm16kBuffer) {
  const rms = calculateRMS(pcm16kBuffer);
  const isVoice = rms > vadConfig.energyThreshold;
  if (isVoice) {
    speechFrameCount++;
    silenceFrameCount = 0;
    // Debounce: require several consecutive voiced frames to start a segment.
    if (!isSpeaking && speechFrameCount >= vadConfig.speechFramesRequired) {
      isSpeaking = true;
      speechBuffers = [];
      console.log("[LocalAI] Speech started (RMS:", rms.toFixed(4), ")");
      sendToRenderer("update-status", "Listening... (speech detected)");
    }
  } else {
    silenceFrameCount++;
    speechFrameCount = 0;
    // Enough trailing silence — close the segment and transcribe it.
    if (isSpeaking && silenceFrameCount >= vadConfig.silenceFramesRequired) {
      isSpeaking = false;
      console.log(
        "[LocalAI] Speech ended, accumulated",
        speechBuffers.length,
        "chunks",
      );
      sendToRenderer("update-status", "Transcribing...");
      // Trigger transcription with accumulated audio
      const audioData = Buffer.concat(speechBuffers);
      speechBuffers = [];
      // Fire-and-forget: errors are reported to the renderer, not thrown.
      handleSpeechEnd(audioData).catch((err) => {
        console.error("[LocalAI] handleSpeechEnd crashed:", err);
        sendToRenderer(
          "update-status",
          "Transcription error: " + (err?.message || "unknown"),
        );
      });
      return;
    }
  }
  // Accumulate audio during speech
  if (isSpeaking) {
    speechBuffers.push(Buffer.from(pcm16kBuffer));
    // Cap buffer at ~30 seconds to prevent OOM and ONNX tensor overflow
    const totalBytes = speechBuffers.reduce((sum, b) => sum + b.length, 0);
    if (totalBytes >= MAX_SPEECH_BUFFER_BYTES) {
      // Force-close the segment even though the speaker hasn't paused.
      isSpeaking = false;
      console.log(
        "[LocalAI] Speech buffer limit reached (" +
          totalBytes +
          " bytes), forcing transcription",
      );
      sendToRenderer("update-status", "Transcribing (max length reached)...");
      const audioData = Buffer.concat(speechBuffers);
      speechBuffers = [];
      silenceFrameCount = 0;
      speechFrameCount = 0;
      handleSpeechEnd(audioData).catch((err) => {
        console.error("[LocalAI] handleSpeechEnd crashed:", err);
        sendToRenderer(
          "update-status",
          "Transcription error: " + (err?.message || "unknown"),
        );
      });
    }
  }
}
// ── Whisper Worker (isolated child process) ──
/**
 * Spawn the Whisper transcription worker (idempotent — no-op if one exists).
 *
 * Spawn strategy, in order of preference:
 * 1. System Node.js — native addons (onnxruntime-node) were compiled against
 *    this ABI, so it works without SIGTRAP / ABI mismatches.
 * 2. Electron utilityProcess (packaged builds) — a proper Node.js child
 *    process that doesn't require the RunAsNode fuse.
 * 3. ELECTRON_RUN_AS_NODE (last resort, dev only) — only works when the
 *    RunAsNode fuse isn't flipped; forces the WASM backend.
 */
function spawnWhisperWorker() {
  if (whisperWorker) return;
  const workerPath = path.join(__dirname, "whisperWorker.js");
  console.log("[LocalAI] Spawning Whisper worker:", workerPath);
  const systemNode = getSystemNode();
  if (systemNode) {
    // Spawn with system Node.js — onnxruntime-node native binary matches ABI
    console.log("[LocalAI] Using system Node.js:", systemNode.nodePath);
    whisperWorker = fork(workerPath, [], {
      stdio: ["pipe", "pipe", "pipe", "ipc"],
      execPath: systemNode.nodePath,
      env: {
        ...process.env,
        // Unset ELECTRON_RUN_AS_NODE so the system node doesn't inherit it
        ELECTRON_RUN_AS_NODE: undefined,
      },
    });
  } else {
    // No system Node.js found — try utilityProcess (Electron >= 22)
    try {
      const { utilityProcess: UP } = require("electron");
      if (UP && typeof UP.fork === "function") {
        console.log("[LocalAI] Using Electron utilityProcess");
        // Wrap utilityProcess to look like a ChildProcess for the rest of
        // localai.js.
        whisperWorker = wrapUtilityProcess(UP.fork(workerPath));
      }
    } catch (_) {
      // utilityProcess not available (older Electron or renderer context)
    }
    if (!whisperWorker) {
      console.warn(
        "[LocalAI] No system Node.js — falling back to ELECTRON_RUN_AS_NODE (WASM backend will be used)",
      );
      whisperWorker = fork(workerPath, [], {
        stdio: ["pipe", "pipe", "pipe", "ipc"],
        env: { ...process.env, ELECTRON_RUN_AS_NODE: "1" },
      });
    }
  }
  // BUG FIX: the utilityProcess branch previously `return`ed before this
  // point, so none of the handlers below were attached in that mode —
  // "load-result"/"transcribe-result" replies were dropped and pendingLoad
  // never resolved. All spawn paths now fall through to one shared
  // handler-attachment section.
  whisperWorker.stdout.on("data", (data) => {
    console.log("[WhisperWorker stdout]", data.toString().trim());
  });
  whisperWorker.stderr.on("data", (data) => {
    console.error("[WhisperWorker stderr]", data.toString().trim());
  });
  whisperWorker.on("message", (msg) => {
    switch (msg.type) {
      case "ready":
        console.log("[LocalAI] Whisper worker ready");
        break;
      case "load-result":
        handleWorkerLoadResult(msg);
        break;
      case "transcribe-result":
        handleWorkerTranscribeResult(msg);
        break;
      case "status":
        sendToRenderer("update-status", msg.message);
        break;
      case "progress":
        // Model-download progress for the renderer's progress UI.
        sendToRenderer("whisper-progress", {
          file: msg.file,
          progress: msg.progress,
          loaded: msg.loaded,
          total: msg.total,
          status: msg.status,
        });
        break;
    }
  });
  whisperWorker.on("exit", (code, signal) => {
    console.error(
      "[LocalAI] Whisper worker exited — code:",
      code,
      "signal:",
      signal,
    );
    whisperWorker = null;
    whisperReady = false;
    // Intentional shutdown (killWhisperWorker) — not a crash.
    if (whisperShuttingDown) {
      whisperShuttingDown = false;
      return;
    }
    // Fail any in-flight transcription so callers don't hang.
    if (pendingTranscribe) {
      pendingTranscribe.reject(
        new Error(
          "Whisper worker crashed (code: " + code + ", signal: " + signal + ")",
        ),
      );
      pendingTranscribe = null;
    }
    // If a session is still active, inform the user and respawn shortly.
    if (isLocalActive) {
      sendToRenderer(
        "update-status",
        "Whisper crashed (signal: " +
          (signal || code) +
          "). Respawning worker...",
      );
      setTimeout(() => {
        if (isLocalActive) {
          respawnWhisperWorker();
        }
      }, 2000);
    }
  });
  whisperWorker.on("error", (err) => {
    console.error("[LocalAI] Whisper worker error:", err);
    whisperWorker = null;
    whisperReady = false;
  });
}
/**
 * Adapt an Electron utilityProcess to the subset of the ChildProcess API
 * that localai.js relies on: message/exit events, send()/kill(),
 * removeAllListeners(), and stdout/stderr stream objects.
 */
function wrapUtilityProcess(up) {
  const EventEmitter = require("events");
  const { Readable } = require("stream");
  const adapter = new EventEmitter();
  // Re-emit worker messages unchanged.
  up.on("message", (msg) => adapter.emit("message", msg));
  // ChildProcess emits (code, signal); utilityProcess has no signal concept.
  up.on("exit", (code) => adapter.emit("exit", code, null));
  // utilityProcess pipes output to the parent console, so expose inert
  // Readable stubs to keep `.stdout.on("data", ...)` call sites happy.
  adapter.stdout = new Readable({ read() {} });
  adapter.stderr = new Readable({ read() {} });
  adapter.send = (data) => up.postMessage(data);
  adapter.kill = (signal) => up.kill();
  adapter.removeAllListeners = () => {
    up.removeAllListeners();
    EventEmitter.prototype.removeAllListeners.call(adapter);
  };
  // Mirror the logging the ChildProcess path attaches to its real pipes.
  adapter.stdout.on("data", (chunk) => {
    console.log("[WhisperWorker stdout]", chunk.toString().trim());
  });
  adapter.stderr.on("data", (chunk) => {
    console.error("[WhisperWorker stderr]", chunk.toString().trim());
  });
  return adapter;
}
// Promise resolver for an in-flight model load (one at a time).
let pendingLoad = null;

/**
 * Handle the worker's model-load reply: flip readiness flags, notify the
 * renderer, and resolve the pending load promise with true/false.
 */
function handleWorkerLoadResult(msg) {
  const loaded = Boolean(msg.success);
  if (loaded) {
    console.log(
      "[LocalAI] Whisper model loaded successfully (in worker, device:",
      msg.device || "unknown",
      ")",
    );
    whisperReady = true;
    sendToRenderer("whisper-downloading", false);
  } else {
    console.error("[LocalAI] Whisper worker failed to load model:", msg.error);
    sendToRenderer("whisper-downloading", false);
    sendToRenderer(
      "update-status",
      "Failed to load Whisper model: " + msg.error,
    );
  }
  isWhisperLoading = false;
  if (pendingLoad) {
    pendingLoad.resolve(loaded);
    pendingLoad = null;
  }
}
/**
 * Deliver the worker's transcription result to the awaiting caller.
 * Worker-side errors resolve to null (the caller just skips the turn)
 * rather than rejecting.
 */
function handleWorkerTranscribeResult(msg) {
  const waiter = pendingTranscribe;
  if (!waiter) return;
  pendingTranscribe = null;
  if (msg.success) {
    console.log("[LocalAI] Transcription:", msg.text);
    waiter.resolve(msg.text || null);
  } else {
    console.error("[LocalAI] Worker transcription error:", msg.error);
    waiter.resolve(null);
  }
}
/**
 * Replace a dead worker: kill any remnant, start a fresh process, and kick
 * off a reload of the user's configured Whisper model.
 */
function respawnWhisperWorker() {
  killWhisperWorker();
  spawnWhisperWorker();
  const { app } = require("electron");
  const cacheDir = path.join(app.getPath("userData"), "whisper-models");
  const prefs = require("../storage").getPreferences();
  const modelName = prefs.whisperModel || "Xenova/whisper-small";
  sendToRenderer("whisper-downloading", true);
  isWhisperLoading = true;
  whisperWorker.send({
    type: "load",
    modelName,
    cacheDir,
    device: resolveWhisperDevice(),
  });
}
/**
 * Pick the ONNX backend for Whisper inference.
 * - "cpu": onnxruntime-node (fast native, requires a matching Node ABI)
 * - "wasm": onnxruntime-web (slower but universally compatible)
 * An explicit user preference always wins; otherwise native CPU is only
 * chosen when the worker runs under a real system Node.js.
 */
function resolveWhisperDevice() {
  const { whisperDevice } = require("../storage").getPreferences();
  if (whisperDevice) return whisperDevice;
  // Auto-detect: native is safe only under system Node.js.
  return getSystemNode() ? "cpu" : "wasm";
}
/**
 * Convert the app's BCP-47 language tag ("en-US", "ru-RU", ...) into the
 * ISO 639-1 code Whisper expects ("en", "ru"). "auto" passes through so the
 * worker lets Whisper detect the language itself.
 */
function resolveWhisperLanguage() {
  const prefs = require("../storage").getPreferences();
  const lang = prefs.selectedLanguage || "en-US";
  if (lang === "auto") return "auto";
  // The primary BCP-47 subtag is the ISO 639 code; Whisper groups the
  // Chinese variants ("cmn" Mandarin, "yue" Cantonese) under "zh".
  const primary = lang.split("-")[0].toLowerCase();
  switch (primary) {
    case "cmn":
    case "yue":
      return "zh";
    default:
      return primary;
  }
}
/**
 * Terminate the Whisper worker. Sets whisperShuttingDown first so the exit
 * handler treats this as intentional and skips crash/respawn logic.
 */
function killWhisperWorker() {
  if (!whisperWorker) return;
  whisperShuttingDown = true;
  try {
    whisperWorker.removeAllListeners();
    whisperWorker.kill();
  } catch (_) {
    // Worker was already dead — nothing to clean up.
  }
  whisperWorker = null;
  whisperReady = false;
}
/**
 * Ensure the Whisper model is loaded in the worker.
 * Resolves true when ready, false on load failure, or null when another
 * load is already in progress (callers treat null as "not ready").
 * The returned promise is settled by handleWorkerLoadResult via pendingLoad.
 * @param {string} modelName - HF model id, e.g. "Xenova/whisper-small"
 */
async function loadWhisperPipeline(modelName) {
  if (whisperReady) return true;
  if (isWhisperLoading) return null;
  isWhisperLoading = true;
  console.log("[LocalAI] Loading Whisper model via worker:", modelName);
  sendToRenderer("whisper-downloading", true);
  sendToRenderer(
    "update-status",
    "Loading Whisper model (first time may take a while)...",
  );
  spawnWhisperWorker();
  const { app } = require("electron");
  // Model files are cached under userData so downloads happen once.
  const cacheDir = path.join(app.getPath("userData"), "whisper-models");
  const device = resolveWhisperDevice();
  console.log("[LocalAI] Whisper device:", device);
  return new Promise((resolve) => {
    pendingLoad = { resolve };
    whisperWorker.send({ type: "load", modelName, cacheDir, device });
  });
}
/**
 * Transcribe a 16 kHz 16-bit mono PCM buffer via the Whisper worker.
 * Resolves with the transcription text, or null on invalid input, worker
 * error, send failure, or a 60 s timeout. Rejects only if the worker
 * process crashes mid-request (see the exit handler in spawnWhisperWorker).
 * NOTE(review): assumes one transcription in flight at a time — a stale
 * timeout can null out a *newer* pendingTranscribe; confirm the VAD caller
 * serializes requests.
 */
async function transcribeAudio(pcm16kBuffer) {
  if (!whisperReady || !whisperWorker) {
    console.error("[LocalAI] Whisper worker not ready");
    return null;
  }
  // Need at least one full int16 sample.
  if (!pcm16kBuffer || pcm16kBuffer.length < 2) {
    console.error("[LocalAI] Invalid audio buffer:", pcm16kBuffer?.length);
    return null;
  }
  console.log(
    "[LocalAI] Starting transcription, audio length:",
    pcm16kBuffer.length,
    "bytes",
  );
  // Send audio to worker as base64 (IPC serialization)
  const audioBase64 = pcm16kBuffer.toString("base64");
  return new Promise((resolve, reject) => {
    // Timeout: if worker takes > 60s, assume it's stuck
    const timeout = setTimeout(() => {
      console.error("[LocalAI] Transcription timed out after 60s");
      if (pendingTranscribe) {
        pendingTranscribe = null;
        resolve(null);
      }
    }, 60000);
    // Wrap the callbacks so a worker reply also cancels the timeout.
    pendingTranscribe = {
      resolve: (val) => {
        clearTimeout(timeout);
        resolve(val);
      },
      reject: (err) => {
        clearTimeout(timeout);
        reject(err);
      },
    };
    try {
      whisperWorker.send({
        type: "transcribe",
        audioBase64,
        language: resolveWhisperLanguage(),
      });
    } catch (err) {
      // IPC channel already closed — fail soft with null.
      clearTimeout(timeout);
      pendingTranscribe = null;
      console.error("[LocalAI] Failed to send to worker:", err);
      resolve(null);
    }
  });
}
// ── Speech End Handler ──
/**
 * Process a completed speech segment: transcribe it, then feed the text to
 * Ollama. Segments shorter than ~0.5 s (16000 bytes at 16 kHz / 16-bit) or
 * whose transcription is empty/near-empty are silently dropped.
 */
async function handleSpeechEnd(audioData) {
  if (!isLocalActive) return;
  if (audioData.length < 16000) {
    console.log("[LocalAI] Audio too short, skipping");
    sendToRenderer("update-status", "Listening...");
    return;
  }
  console.log("[LocalAI] Processing audio:", audioData.length, "bytes");
  try {
    const transcription = await transcribeAudio(audioData);
    const cleaned = transcription ? transcription.trim() : "";
    if (cleaned === "" || cleaned.length < 2) {
      console.log("[LocalAI] Empty transcription, skipping");
      sendToRenderer("update-status", "Listening...");
      return;
    }
    sendToRenderer("update-status", "Generating response...");
    await sendToOllama(transcription);
  } catch (error) {
    console.error("[LocalAI] handleSpeechEnd error:", error);
    sendToRenderer(
      "update-status",
      "Error: " + (error?.message || "transcription failed"),
    );
  }
}
// ── Ollama Chat ──
/**
 * Push a user utterance into the rolling history and stream Ollama's reply
 * to the renderer token by token. The completed turn is persisted via
 * saveConversationTurn; errors are reported through update-status.
 */
async function sendToOllama(transcription) {
  if (!ollamaClient || !ollamaModel) {
    console.error("[LocalAI] Ollama not configured");
    return;
  }
  console.log(
    "[LocalAI] Sending to Ollama:",
    transcription.substring(0, 100) + "...",
  );
  localConversationHistory.push({
    role: "user",
    content: transcription.trim(),
  });
  // Bound the rolling context window to the last 20 messages.
  if (localConversationHistory.length > 20) {
    localConversationHistory = localConversationHistory.slice(-20);
  }
  try {
    const systemMessage = {
      role: "system",
      content: currentSystemPrompt || "You are a helpful assistant.",
    };
    const stream = await ollamaClient.chat({
      model: ollamaModel,
      messages: [systemMessage, ...localConversationHistory],
      stream: true,
    });
    let accumulated = "";
    let firstToken = true;
    for await (const chunk of stream) {
      const piece = chunk.message?.content || "";
      if (!piece) continue;
      accumulated += piece;
      // First token opens a new response bubble; later ones update it.
      sendToRenderer(firstToken ? "new-response" : "update-response", accumulated);
      firstToken = false;
    }
    if (accumulated.trim()) {
      localConversationHistory.push({
        role: "assistant",
        content: accumulated.trim(),
      });
      saveConversationTurn(transcription, accumulated);
    }
    console.log("[LocalAI] Ollama response completed");
    sendToRenderer("update-status", "Listening...");
  } catch (error) {
    console.error("[LocalAI] Ollama error:", error);
    sendToRenderer("update-status", "Ollama error: " + error.message);
  }
}
// ── Public API ──
/**
 * Start a fully local session: connect to Ollama, load the Whisper model
 * in the worker, and reset VAD/conversation state.
 * Emits session-initializing true/false around the whole process and
 * resolves true only when both Ollama and Whisper are ready.
 * @param {string} ollamaHost - Ollama server URL, e.g. "http://localhost:11434"
 * @param {string} model - Ollama chat model name
 * @param {string} whisperModel - HF Whisper model id
 * @param {string} profile - prompt profile passed to getSystemPrompt
 * @param {string} customPrompt - extra user prompt text
 */
async function initializeLocalSession(
  ollamaHost,
  model,
  whisperModel,
  profile,
  customPrompt,
) {
  console.log("[LocalAI] Initializing local session:", {
    ollamaHost,
    model,
    whisperModel,
    profile,
  });
  sendToRenderer("session-initializing", true);
  try {
    // Setup system prompt (googleSearchEnabled=false: no web search locally)
    currentSystemPrompt = getSystemPrompt(profile, customPrompt, false);
    // Initialize Ollama client
    ollamaClient = new Ollama({ host: ollamaHost });
    ollamaModel = model;
    // Test Ollama connection with a cheap list() call before committing.
    try {
      await ollamaClient.list();
      console.log("[LocalAI] Ollama connection verified");
    } catch (error) {
      console.error(
        "[LocalAI] Cannot connect to Ollama at",
        ollamaHost,
        ":",
        error.message,
      );
      sendToRenderer("session-initializing", false);
      sendToRenderer(
        "update-status",
        "Cannot connect to Ollama at " + ollamaHost,
      );
      return false;
    }
    // Load Whisper model (false = failed, null = another load in progress).
    const pipeline = await loadWhisperPipeline(whisperModel);
    if (!pipeline) {
      sendToRenderer("session-initializing", false);
      return false;
    }
    // Reset VAD state
    isSpeaking = false;
    speechBuffers = [];
    silenceFrameCount = 0;
    speechFrameCount = 0;
    resampleRemainder = Buffer.alloc(0);
    localConversationHistory = [];
    // Initialize conversation session
    initializeNewSession(profile, customPrompt);
    isLocalActive = true;
    sendToRenderer("session-initializing", false);
    sendToRenderer("update-status", "Local AI ready - Listening...");
    console.log("[LocalAI] Session initialized successfully");
    return true;
  } catch (error) {
    console.error("[LocalAI] Initialization error:", error);
    sendToRenderer("session-initializing", false);
    sendToRenderer("update-status", "Local AI error: " + error.message);
    return false;
  }
}
/**
 * Feed one 24 kHz mono PCM chunk from the capture pipeline into VAD.
 * No-op while no local session is active.
 */
function processLocalAudio(monoChunk24k) {
  if (!isLocalActive) return;
  // Whisper expects 16 kHz input, so downsample before VAD.
  const resampled = resample24kTo16k(monoChunk24k);
  if (resampled.length > 0) processVAD(resampled);
}
/**
 * Stop the local session and reset all client, VAD, and conversation state.
 * The Whisper worker is deliberately left running so the next session can
 * reuse the already-loaded model; call killWhisperWorker() for full cleanup.
 */
function closeLocalSession() {
  console.log("[LocalAI] Closing local session");
  isLocalActive = false;
  // Drop the Ollama client and prompt.
  ollamaClient = null;
  ollamaModel = null;
  currentSystemPrompt = null;
  // Clear VAD and buffered audio state.
  isSpeaking = false;
  speechBuffers = [];
  silenceFrameCount = 0;
  speechFrameCount = 0;
  resampleRemainder = Buffer.alloc(0);
  // Forget the conversation.
  localConversationHistory = [];
}
// True while a local (Ollama + Whisper) session is running.
function isLocalSessionActive() {
  return isLocalActive;
}
// ── Send text directly to Ollama (for manual text input) ──
/**
 * Forward manually typed text to Ollama, bypassing VAD/transcription.
 * @returns {Promise<{success: boolean, error?: string}>}
 */
async function sendLocalText(text) {
  if (!isLocalActive || !ollamaClient) {
    return { success: false, error: "No active local session" };
  }
  try {
    await sendToOllama(text);
    return { success: true };
  } catch (error) {
    return { success: false, error: error.message };
  }
}
/**
 * Send a base64-encoded image plus prompt to Ollama (multimodal models) and
 * stream the reply to the renderer.
 * Only the text prompt is stored in history — the image is sent once and
 * not kept, so follow-up turns won't resend it.
 * @returns {Promise<{success: boolean, text?: string, model?: string, error?: string}>}
 */
async function sendLocalImage(base64Data, prompt) {
  if (!isLocalActive || !ollamaClient) {
    return { success: false, error: "No active local session" };
  }
  try {
    console.log("[LocalAI] Sending image to Ollama");
    sendToRenderer("update-status", "Analyzing image...");
    const userMessage = {
      role: "user",
      content: prompt,
      images: [base64Data],
    };
    // Store text-only version in history
    localConversationHistory.push({ role: "user", content: prompt });
    if (localConversationHistory.length > 20) {
      localConversationHistory = localConversationHistory.slice(-20);
    }
    // Replace the last history entry (text-only) with the image-bearing
    // message for this one request.
    const messages = [
      {
        role: "system",
        content: currentSystemPrompt || "You are a helpful assistant.",
      },
      ...localConversationHistory.slice(0, -1),
      userMessage,
    ];
    const response = await ollamaClient.chat({
      model: ollamaModel,
      messages,
      stream: true,
    });
    let fullText = "";
    let isFirst = true;
    for await (const part of response) {
      const token = part.message?.content || "";
      if (token) {
        fullText += token;
        // First token opens a new response bubble; later ones update it.
        sendToRenderer(isFirst ? "new-response" : "update-response", fullText);
        isFirst = false;
      }
    }
    if (fullText.trim()) {
      localConversationHistory.push({
        role: "assistant",
        content: fullText.trim(),
      });
      saveConversationTurn(prompt, fullText);
    }
    console.log("[LocalAI] Image response completed");
    sendToRenderer("update-status", "Listening...");
    return { success: true, text: fullText, model: ollamaModel };
  } catch (error) {
    console.error("[LocalAI] Image error:", error);
    sendToRenderer("update-status", "Ollama error: " + error.message);
    return { success: false, error: error.message };
  }
}
// Public surface of the local (Ollama + Whisper) provider; worker and VAD
// internals stay private to this module.
module.exports = {
  initializeLocalSession,
  processLocalAudio,
  closeLocalSession,
  isLocalSessionActive,
  sendLocalText,
  sendLocalImage,
};

View File

@ -1,97 +0,0 @@
const fs = require('fs');
const path = require('path');
const { app } = require('electron');
// Lazily-initialized write stream and cached path for the current log file.
let logFile = null;
let logPath = null;
/**
 * Compute (and cache) the per-day log file path, creating the logs
 * directory under userData on first use.
 * @returns {string} absolute path like <userData>/logs/app-YYYY-MM-DD.log
 */
function getLogPath() {
    if (logPath) return logPath;
    const logsDir = path.join(app.getPath('userData'), 'logs');
    // Create logs directory if it doesn't exist
    if (!fs.existsSync(logsDir)) {
        fs.mkdirSync(logsDir, { recursive: true });
    }
    // One file per calendar day — the date portion of an ISO timestamp.
    const day = new Date().toISOString().split('T')[0];
    logPath = path.join(logsDir, `app-${day}.log`);
    return logPath;
}
/**
 * Open the log file and monkey-patch console.log/error/warn so everything
 * printed also lands in the file (original console behavior is preserved).
 * Returns the log file path, or null if setup failed.
 */
function initLogger() {
    try {
        const filePath = getLogPath();
        // Append mode so restarts on the same day share one file.
        logFile = fs.createWriteStream(filePath, { flags: 'a' });
        const startMsg = `\n${'='.repeat(60)}\nApp started at ${new Date().toISOString()}\nPlatform: ${process.platform}, Arch: ${process.arch}\nElectron: ${process.versions.electron}, Node: ${process.versions.node}\nPackaged: ${app.isPackaged}\n${'='.repeat(60)}\n`;
        logFile.write(startMsg);
        // Override console methods to also write to file
        const originalLog = console.log;
        const originalError = console.error;
        const originalWarn = console.warn;
        console.log = (...args) => {
            originalLog.apply(console, args);
            writeLog('INFO', args);
        };
        console.error = (...args) => {
            originalError.apply(console, args);
            writeLog('ERROR', args);
        };
        console.warn = (...args) => {
            originalWarn.apply(console, args);
            writeLog('WARN', args);
        };
        console.log('Logger initialized, writing to:', filePath);
        return filePath;
    } catch (err) {
        console.error('Failed to initialize logger:', err);
        return null;
    }
}
/**
 * Append one formatted line to the log file. No-op before initLogger();
 * never throws — a logging failure must not take down the app.
 * @param {string} level - 'INFO' | 'WARN' | 'ERROR'
 * @param {Array} args - values as passed to the console method
 */
function writeLog(level, args) {
    if (!logFile) return;
    try {
        const rendered = args
            .map(arg => {
                if (typeof arg !== 'object') return String(arg);
                // Objects get pretty-printed; fall back for cycles etc.
                try {
                    return JSON.stringify(arg, null, 2);
                } catch {
                    return String(arg);
                }
            })
            .join(' ');
        logFile.write(`[${new Date().toISOString()}] [${level}] ${rendered}\n`);
    } catch (err) {
        // Silently fail - don't want logging errors to crash the app
    }
}
/** Write a closing marker and release the log stream (safe to call twice). */
function closeLogger() {
    if (!logFile) return;
    logFile.write(`\nApp closed at ${new Date().toISOString()}\n`);
    logFile.end();
    logFile = null;
}
// Public logger API; writeLog stays private to this module.
module.exports = {
    initLogger,
    closeLogger,
    getLogPath,
};

177
src/utils/nodeDetect.js Normal file
View File

@ -0,0 +1,177 @@
/**
* nodeDetect.js Locate the system Node.js binary.
*
* When spawning child processes that rely on native addons compiled against the
* system Node.js ABI (e.g. onnxruntime-node), we must NOT run them inside
* Electron's embedded Node.js runtime the ABI mismatch causes SIGTRAP /
* SIGSEGV crashes. This module finds the real system `node` binary so we can
* pass it as `execPath` to `child_process.fork()`.
*
* Falls back to `null` when no system Node.js is found, letting the caller
* decide on an alternative strategy (e.g. WASM backend).
*/
const { execSync } = require("child_process");
const fs = require("fs");
const path = require("path");
const os = require("os");
/** Well-known Node.js install locations per platform. */
// NOTE(review): the ~/.nvm/versions/node entries are directories, not
// binaries ("needs glob") — presumably the consumer special-cases them via
// findNvmNode(); confirm callers never treat them as executable paths.
const KNOWN_PATHS = {
  darwin: [
    "/usr/local/bin/node",
    "/opt/homebrew/bin/node", // Apple Silicon Homebrew
    path.join(os.homedir(), ".nvm/versions/node"), // nvm — needs glob
    path.join(os.homedir(), ".volta/bin/node"), // Volta
    path.join(os.homedir(), ".fnm/aliases/default/bin/node"), // fnm
    path.join(os.homedir(), ".mise/shims/node"), // mise (rtx)
    path.join(os.homedir(), ".asdf/shims/node"), // asdf
  ],
  linux: [
    "/usr/bin/node",
    "/usr/local/bin/node",
    path.join(os.homedir(), ".nvm/versions/node"),
    path.join(os.homedir(), ".volta/bin/node"),
    path.join(os.homedir(), ".fnm/aliases/default/bin/node"),
    path.join(os.homedir(), ".mise/shims/node"),
    path.join(os.homedir(), ".asdf/shims/node"),
  ],
  win32: [
    "C:\\Program Files\\nodejs\\node.exe",
    "C:\\Program Files (x86)\\nodejs\\node.exe",
    path.join(os.homedir(), "AppData", "Roaming", "nvm", "current", "node.exe"),
    path.join(os.homedir(), ".volta", "bin", "node.exe"),
  ],
};
/**
* Find the latest nvm-installed Node.js binary on macOS / Linux.
* Returns the path to the `node` binary or null.
*/
/**
 * Find the newest nvm-installed Node.js binary on macOS / Linux.
 * Version directories (v18.x, v20.x, ...) are sorted numerically descending
 * so the highest installed version wins.
 * @returns {string|null} Absolute path to the `node` binary, or null.
 */
function findNvmNode() {
  const nvmRoot = path.join(os.homedir(), ".nvm", "versions", "node");
  try {
    if (!fs.existsSync(nvmRoot)) return null;
    const versionDirs = fs
      .readdirSync(nvmRoot)
      .filter((entry) => entry.startsWith("v"));
    if (versionDirs.length === 0) return null;
    // Numeric-aware descending sort: rough semver ordering, good enough here.
    versionDirs.sort((a, b) => b.localeCompare(a, undefined, { numeric: true }));
    const candidate = path.join(nvmRoot, versionDirs[0], "bin", "node");
    if (fs.existsSync(candidate)) return candidate;
  } catch (_) {
    // Unreadable directory etc. — treat as "not installed".
  }
  return null;
}
/**
* Attempt to resolve `node` via the system PATH using `which` (Unix) or
* `where` (Windows). Returns the path string or null.
*/
/**
 * Resolve `node` via the system PATH using `which` (Unix) or `where`
 * (Windows). Common version-manager shim directories are appended to PATH so
 * shimmed installs are found even when the app inherits a minimal env.
 * @returns {string|null} Absolute path to `node`, or null when not found.
 */
function whichNode() {
  const isWindows = process.platform === "win32";
  const shimDirs = [
    "/usr/local/bin",
    "/opt/homebrew/bin",
    path.join(os.homedir(), ".volta", "bin"),
    path.join(os.homedir(), ".fnm", "aliases", "default", "bin"),
    path.join(os.homedir(), ".mise", "shims"),
    path.join(os.homedir(), ".asdf", "shims"),
  ];
  const searchPath = [process.env.PATH || "", ...shimDirs].join(isWindows ? ";" : ":");
  try {
    const output = execSync(isWindows ? "where node" : "which node", {
      encoding: "utf8",
      timeout: 5000,
      env: { ...process.env, PATH: searchPath },
      stdio: ["ignore", "pipe", "ignore"],
    }).trim();
    // `where` on Windows may print several matches — keep only the first.
    const [firstLine] = output.split(/\r?\n/);
    const candidate = firstLine.trim();
    if (candidate && fs.existsSync(candidate)) return candidate;
  } catch (_) {
    // which/where exited non-zero or timed out — "not found".
  }
  return null;
}
/**
* Check whether a given path is a real Node.js binary (not the Electron binary
* pretending to be Node via ELECTRON_RUN_AS_NODE).
*/
/**
 * Check whether `nodePath` is a genuine Node.js binary rather than Electron
 * pretending to be Node via ELECTRON_RUN_AS_NODE: a real Node reports no
 * `process.versions.electron`.
 * @param {string|null|undefined} nodePath - Candidate binary path.
 * @returns {boolean} true only when the probe prints "true".
 */
function isRealNode(nodePath) {
  if (!nodePath) return false;
  const probe = `"${nodePath}" -e "process.stdout.write(String(!process.versions.electron))"`;
  try {
    const stdout = execSync(probe, {
      encoding: "utf8",
      timeout: 5000,
      // Spreading `undefined` removes ELECTRON_RUN_AS_NODE from the child env.
      env: { ...process.env, ELECTRON_RUN_AS_NODE: undefined },
      stdio: ["ignore", "pipe", "ignore"],
    });
    return stdout.trim() === "true";
  } catch (_) {
    return false;
  }
}
/**
* Find the system Node.js binary.
*
* @returns {{ nodePath: string } | null} The absolute path to system `node`,
* or null if none found. The caller should fall back to WASM when null.
*/
/**
 * Find the system Node.js binary, in priority order: the user's PATH (with
 * version-manager shims), then nvm's versioned installs, then a static list
 * of well-known per-platform locations.
 * @returns {{ nodePath: string } | null} The absolute path to system `node`,
 *   or null if none found. The caller should fall back to WASM when null.
 */
function findSystemNode() {
  const fromPath = whichNode();
  if (fromPath && isRealNode(fromPath)) return { nodePath: fromPath };

  const fromNvm = findNvmNode();
  if (fromNvm && isRealNode(fromNvm)) return { nodePath: fromNvm };

  const candidates = KNOWN_PATHS[process.platform] || KNOWN_PATHS.linux;
  for (const candidate of candidates) {
    // The nvm entry is a root directory, not a binary — handled above.
    if (candidate.includes(".nvm/versions/node")) continue;
    if (fs.existsSync(candidate) && isRealNode(candidate)) {
      return { nodePath: candidate };
    }
  }
  return null;
}
/** Cache so we only search once per process lifetime. */
let _cached = undefined;
/**
 * Cached version of `findSystemNode()`.
 * Runs the (potentially slow, child-process-spawning) search at most once;
 * subsequent calls return the memoized result — including a memoized null
 * miss, so a failed search is not retried.
 * @returns {{ nodePath: string } | null}
 */
function getSystemNode() {
  if (_cached === undefined) {
    _cached = findSystemNode();
    if (_cached) {
      console.log("[nodeDetect] Found system Node.js:", _cached.nodePath);
    } else {
      console.warn(
        "[nodeDetect] No system Node.js found — will fall back to WASM backend",
      );
    }
  }
  return _cached;
}
module.exports = { findSystemNode, getSystemNode, isRealNode };

View File

@ -1,402 +0,0 @@
const { BrowserWindow } = require('electron');
const WebSocket = require('ws');
// OpenAI Realtime API implementation
// Documentation: https://platform.openai.com/docs/api-reference/realtime
// Connection state for the Realtime session.
let ws = null; // active WebSocket, or null when disconnected
let isUserClosing = false; // set by closeOpenAISession() to suppress auto-reconnect
let sessionParams = null; // last session config, reused by attemptReconnect()
let reconnectAttempts = 0;
const MAX_RECONNECT_ATTEMPTS = 3;
const RECONNECT_DELAY = 2000; // ms to wait between reconnect attempts
// Message buffer for accumulating responses
let messageBuffer = '';
let currentTranscription = '';
/**
 * Forward an IPC message to the first open renderer window, if any.
 * @param {string} channel - IPC channel name.
 * @param {*} data - Payload to send.
 */
function sendToRenderer(channel, data) {
  const [mainWindow] = BrowserWindow.getAllWindows();
  if (mainWindow) {
    mainWindow.webContents.send(channel, data);
  }
}
/**
 * Build a context-restoration message from prior conversation turns.
 * Takes at most the last 20 turns, keeps only those where both sides are
 * non-blank, and formats them as a single user message to replay after a
 * reconnect.
 * @param {Array<{transcription?: string, ai_response?: string}>} conversationHistory
 * @returns {string|null} The context message, or null when nothing is usable.
 */
function buildContextMessage(conversationHistory) {
  const usableTurns = conversationHistory
    .slice(-20)
    .filter(turn => turn.transcription?.trim() && turn.ai_response?.trim());
  if (usableTurns.length === 0) return null;
  const formatted = usableTurns
    .map(turn => `User: ${turn.transcription.trim()}\nAssistant: ${turn.ai_response.trim()}`)
    .join('\n\n');
  return `Session reconnected. Here's the conversation so far:\n\n${formatted}\n\nContinue from here.`;
}
/**
 * Open a WebSocket session to the OpenAI Realtime API.
 *
 * On a first connect the config is cached (so the close handler can
 * auto-reconnect) and the renderer is told initialization started; on a
 * reconnect the previous conversation is replayed as one context message.
 * @param {object} config - { apiKey, baseUrl, systemPrompt, model, language, isReconnect }.
 *   NOTE(review): `language` is destructured but never used here — confirm intent.
 * @param {Array} [conversationHistory] - Prior turns, replayed on reconnect only.
 * @returns {Promise<WebSocket>} Resolves with the open socket after 'open' fires.
 */
async function initializeOpenAISession(config, conversationHistory = []) {
  const { apiKey, baseUrl, systemPrompt, model, language, isReconnect } = config;
  if (!isReconnect) {
    // Remember the config so attemptReconnect() can rebuild the session.
    sessionParams = config;
    reconnectAttempts = 0;
    sendToRenderer('session-initializing', true);
  }
  // Use custom baseURL or default OpenAI endpoint
  const wsUrl = baseUrl || 'wss://api.openai.com/v1/realtime';
  const fullUrl = `${wsUrl}?model=${model || 'gpt-4o-realtime-preview-2024-12-17'}`;
  return new Promise((resolve, reject) => {
    try {
      ws = new WebSocket(fullUrl, {
        headers: {
          Authorization: `Bearer ${apiKey}`,
          'OpenAI-Beta': 'realtime=v1',
        },
      });
      ws.on('open', () => {
        console.log('OpenAI Realtime connection established');
        // Configure session: text+audio, PCM16 in/out, Whisper transcription,
        // and server-side voice-activity detection for turn taking.
        const sessionConfig = {
          type: 'session.update',
          session: {
            modalities: ['text', 'audio'],
            instructions: systemPrompt,
            voice: 'alloy',
            input_audio_format: 'pcm16',
            output_audio_format: 'pcm16',
            input_audio_transcription: {
              model: 'whisper-1',
            },
            turn_detection: {
              type: 'server_vad',
              threshold: 0.5,
              prefix_padding_ms: 300,
              silence_duration_ms: 500,
            },
            temperature: 0.8,
            max_response_output_tokens: 4096,
          },
        };
        ws.send(JSON.stringify(sessionConfig));
        // Restore context if reconnecting
        if (isReconnect && conversationHistory.length > 0) {
          const contextMessage = buildContextMessage(conversationHistory);
          if (contextMessage) {
            ws.send(
              JSON.stringify({
                type: 'conversation.item.create',
                item: {
                  type: 'message',
                  role: 'user',
                  content: [{ type: 'input_text', text: contextMessage }],
                },
              })
            );
            ws.send(JSON.stringify({ type: 'response.create' }));
          }
        }
        sendToRenderer('update-status', 'Connected to OpenAI');
        if (!isReconnect) {
          sendToRenderer('session-initializing', false);
        }
        resolve(ws);
      });
      ws.on('message', data => {
        try {
          const event = JSON.parse(data.toString());
          handleOpenAIEvent(event);
        } catch (error) {
          console.error('Error parsing OpenAI message:', error);
        }
      });
      ws.on('error', error => {
        console.error('OpenAI WebSocket error:', error);
        sendToRenderer('update-status', 'Error: ' + error.message);
        reject(error);
      });
      ws.on('close', (code, reason) => {
        console.log(`OpenAI WebSocket closed: ${code} - ${reason}`);
        if (isUserClosing) {
          // Expected shutdown from closeOpenAISession() — no retry.
          isUserClosing = false;
          sendToRenderer('update-status', 'Session closed');
          return;
        }
        // Attempt reconnection
        if (sessionParams && reconnectAttempts < MAX_RECONNECT_ATTEMPTS) {
          attemptReconnect(conversationHistory);
        } else {
          sendToRenderer('update-status', 'Session closed');
        }
      });
    } catch (error) {
      console.error('Failed to initialize OpenAI session:', error);
      if (!isReconnect) {
        sendToRenderer('session-initializing', false);
      }
      reject(error);
    }
  });
}
/**
 * Dispatch one server event from the Realtime WebSocket.
 * Accumulates streamed response text in `messageBuffer`, collects the user's
 * speech transcription in `currentTranscription`, and forwards state changes
 * to the renderer. Unknown event types are ignored.
 * @param {{type: string}} event - Parsed Realtime API event.
 */
function handleOpenAIEvent(event) {
  console.log('OpenAI event:', event.type);
  switch (event.type) {
    case 'session.created':
      console.log('Session created:', event.session.id);
      break;
    case 'session.updated':
      console.log('Session updated');
      sendToRenderer('update-status', 'Listening...');
      break;
    case 'input_audio_buffer.speech_started':
      console.log('Speech started');
      break;
    case 'input_audio_buffer.speech_stopped':
      console.log('Speech stopped');
      break;
    case 'conversation.item.input_audio_transcription.completed':
      // User speech transcription — stashed for saving with the next response.
      if (event.transcript) {
        currentTranscription += event.transcript;
        console.log('Transcription:', event.transcript);
      }
      break;
    case 'response.audio_transcript.delta':
      // An empty buffer means this delta starts a brand-new response.
      if (event.delta) {
        const isNewResponse = messageBuffer === '';
        messageBuffer += event.delta;
        sendToRenderer(isNewResponse ? 'new-response' : 'update-response', messageBuffer);
      }
      break;
    case 'response.audio_transcript.done':
      console.log('Audio transcript complete');
      break;
    case 'response.text.delta':
      if (event.delta) {
        const isNewResponse = messageBuffer === '';
        messageBuffer += event.delta;
        sendToRenderer(isNewResponse ? 'new-response' : 'update-response', messageBuffer);
      }
      break;
    case 'response.done':
      // Flush the finished response and persist the completed turn.
      if (messageBuffer.trim() !== '') {
        sendToRenderer('update-response', messageBuffer);
        // Send conversation turn to be saved
        if (currentTranscription) {
          sendToRenderer('save-conversation-turn-data', {
            transcription: currentTranscription,
            response: messageBuffer,
          });
          currentTranscription = '';
        }
      }
      messageBuffer = '';
      sendToRenderer('update-status', 'Listening...');
      break;
    case 'error':
      console.error('OpenAI error:', event.error);
      sendToRenderer('update-status', 'Error: ' + event.error.message);
      break;
    default:
      // console.log('Unhandled event type:', event.type);
      break;
  }
}
/**
 * Try to re-establish the Realtime session after an unexpected close.
 * Waits RECONNECT_DELAY ms, then reconnects with the cached session params
 * and replays history; recurses on failure up to MAX_RECONNECT_ATTEMPTS.
 * @param {Array} conversationHistory - Turns to replay into the new session.
 * @returns {Promise<boolean>} true when reconnected, false after giving up.
 */
async function attemptReconnect(conversationHistory) {
  reconnectAttempts++;
  console.log(`Reconnection attempt ${reconnectAttempts}/${MAX_RECONNECT_ATTEMPTS}`);
  // Drop any half-received response/transcription from the dead connection.
  messageBuffer = '';
  currentTranscription = '';
  sendToRenderer('update-status', `Reconnecting... (${reconnectAttempts}/${MAX_RECONNECT_ATTEMPTS})`);
  await new Promise(resolve => setTimeout(resolve, RECONNECT_DELAY));
  try {
    const newConfig = { ...sessionParams, isReconnect: true };
    ws = await initializeOpenAISession(newConfig, conversationHistory);
    sendToRenderer('update-status', 'Reconnected! Listening...');
    console.log('OpenAI session reconnected successfully');
    return true;
  } catch (error) {
    console.error(`Reconnection attempt ${reconnectAttempts} failed:`, error);
    if (reconnectAttempts < MAX_RECONNECT_ATTEMPTS) {
      return attemptReconnect(conversationHistory);
    }
    console.log('Max reconnection attempts reached');
    sendToRenderer('reconnect-failed', {
      message: 'Tried 3 times to reconnect to OpenAI. Check your connection and API key.',
    });
    // Clearing the params also disables further auto-reconnects on 'close'.
    sessionParams = null;
    return false;
  }
}
/**
 * Append one base64-encoded PCM16 audio chunk to the Realtime input buffer.
 * @param {string} base64Data - Base64 audio payload.
 * @returns {Promise<{success: boolean, error?: string}>}
 */
async function sendAudioToOpenAI(base64Data) {
  if (!ws || ws.readyState !== WebSocket.OPEN) {
    console.error('WebSocket not connected');
    return { success: false, error: 'No active connection' };
  }
  try {
    const appendEvent = {
      type: 'input_audio_buffer.append',
      audio: base64Data,
    };
    ws.send(JSON.stringify(appendEvent));
    return { success: true };
  } catch (error) {
    console.error('Error sending audio to OpenAI:', error);
    return { success: false, error: error.message };
  }
}
/**
 * Send a user text message over the Realtime connection and ask the model
 * to respond.
 * @param {string} text - User message.
 * @returns {Promise<{success: boolean, error?: string}>}
 */
async function sendTextToOpenAI(text) {
  if (!ws || ws.readyState !== WebSocket.OPEN) {
    console.error('WebSocket not connected');
    return { success: false, error: 'No active connection' };
  }
  try {
    // Create a conversation item with user text
    const itemEvent = {
      type: 'conversation.item.create',
      item: {
        type: 'message',
        role: 'user',
        content: [{ type: 'input_text', text: text }],
      },
    };
    ws.send(JSON.stringify(itemEvent));
    // Trigger response generation
    ws.send(JSON.stringify({ type: 'response.create' }));
    return { success: true };
  } catch (error) {
    console.error('Error sending text to OpenAI:', error);
    return { success: false, error: error.message };
  }
}
/**
 * Analyze an image via the Chat Completions API (the Realtime API has no
 * image support yet), streaming the SSE response to the renderer.
 * A wss:// Realtime baseUrl is rewritten into the matching https REST root.
 * @param {string} base64Data - JPEG image, base64-encoded.
 * @param {string} prompt - Question about the image.
 * @param {{apiKey: string, baseUrl?: string, model?: string}} config
 * @returns {Promise<{success: boolean, text?: string, model?: string, error?: string}>}
 */
async function sendImageToOpenAI(base64Data, prompt, config) {
  const { apiKey, baseUrl, model } = config;
  // OpenAI doesn't support images in Realtime API yet, use standard Chat Completions
  const apiEndpoint = baseUrl ? `${baseUrl.replace('wss://', 'https://').replace('/v1/realtime', '')}/v1/chat/completions` : 'https://api.openai.com/v1/chat/completions';
  try {
    const response = await fetch(apiEndpoint, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        Authorization: `Bearer ${apiKey}`,
      },
      body: JSON.stringify({
        model: model || 'gpt-4o',
        messages: [
          {
            role: 'user',
            content: [
              { type: 'text', text: prompt },
              {
                type: 'image_url',
                image_url: {
                  url: `data:image/jpeg;base64,${base64Data}`,
                },
              },
            ],
          },
        ],
        max_tokens: 4096,
        stream: true,
      }),
    });
    if (!response.ok) {
      const error = await response.text();
      throw new Error(`OpenAI API error: ${response.status} - ${error}`);
    }
    const reader = response.body.getReader();
    const decoder = new TextDecoder();
    let fullText = '';
    let isFirst = true;
    // BUGFIX: SSE events can be split across network chunks, and a chunk can
    // end in the middle of a multi-byte UTF-8 character. Decode in streaming
    // mode and carry the trailing partial line over instead of dropping it.
    let pending = '';
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      pending += decoder.decode(value, { stream: true });
      const lines = pending.split('\n');
      pending = lines.pop(); // last element may be an incomplete line
      for (const line of lines) {
        const trimmed = line.trim();
        if (!trimmed.startsWith('data: ')) continue;
        const data = trimmed.slice('data: '.length);
        if (data === '[DONE]') continue;
        try {
          const json = JSON.parse(data);
          const content = json.choices[0]?.delta?.content;
          if (content) {
            fullText += content;
            sendToRenderer(isFirst ? 'new-response' : 'update-response', fullText);
            isFirst = false;
          }
        } catch (e) {
          // Skip invalid JSON
        }
      }
    }
    return { success: true, text: fullText, model: model || 'gpt-4o' };
  } catch (error) {
    console.error('Error sending image to OpenAI:', error);
    return { success: false, error: error.message };
  }
}
/**
 * Close the Realtime session at the user's request.
 * Setting `isUserClosing` first makes the 'close' handler treat the shutdown
 * as intentional and skip the auto-reconnect path.
 */
function closeOpenAISession() {
  isUserClosing = true;
  sessionParams = null;
  if (ws) {
    ws.close();
    ws = null;
  }
}
// Public API of the OpenAI Realtime provider.
module.exports = {
  initializeOpenAISession,
  sendAudioToOpenAI,
  sendTextToOpenAI,
  sendImageToOpenAI,
  closeOpenAISession,
};

View File

@ -1,618 +0,0 @@
const { BrowserWindow } = require('electron');
const fs = require('fs');
const path = require('path');
const os = require('os');
const { spawn } = require('child_process');
// OpenAI SDK will be loaded dynamically
let OpenAI = null;
// OpenAI SDK-based provider (for BotHub, Azure, and other OpenAI-compatible APIs)
// This uses the standard Chat Completions API with Whisper for transcription
let openaiClient = null; // SDK client instance, created by initializeOpenAISDK()
let currentConfig = null; // last config passed to initializeOpenAISDK()
let conversationMessages = []; // Chat Completions message history (incl. system prompt)
let isProcessing = false; // guards against overlapping API requests
// macOS audio capture
let systemAudioProc = null; // SystemAudioDump child process handle
let audioBuffer = Buffer.alloc(0); // accumulated mono PCM16 awaiting transcription
let transcriptionTimer = null; // setInterval handle for periodic transcription
const TRANSCRIPTION_INTERVAL_MS = 3000; // Transcribe every 3 seconds
const MIN_AUDIO_DURATION_MS = 500; // Minimum audio duration to transcribe
const SAMPLE_RATE = 24000; // Hz; PCM rate used when building WAV files
/**
 * Forward an IPC message to the first open renderer window, if any.
 * @param {string} channel - IPC channel name.
 * @param {*} data - Payload to send.
 */
function sendToRenderer(channel, data) {
  const [mainWindow] = BrowserWindow.getAllWindows();
  if (mainWindow) {
    mainWindow.webContents.send(channel, data);
  }
}
/**
 * Create the OpenAI SDK client used for Chat Completions and Whisper, and
 * reset the conversation.
 * @param {{apiKey: string, baseUrl?: string, model?: string}} config
 *   NOTE(review): `model` is destructured but not used until later calls —
 *   the whole config is kept in `currentConfig`.
 * @returns {Promise<boolean>} true once the client is ready.
 * @throws {Error} when no API key is supplied.
 */
async function initializeOpenAISDK(config) {
  const { apiKey, baseUrl, model } = config;
  if (!apiKey) {
    throw new Error('OpenAI API key is required');
  }
  // Dynamic import for ES module
  if (!OpenAI) {
    const openaiModule = await import('openai');
    OpenAI = openaiModule.default;
  }
  const clientConfig = {
    apiKey: apiKey,
  };
  // Use custom baseURL if provided
  if (baseUrl && baseUrl.trim() !== '') {
    clientConfig.baseURL = baseUrl;
  }
  openaiClient = new OpenAI(clientConfig);
  currentConfig = config;
  conversationMessages = [];
  console.log('OpenAI SDK initialized with baseURL:', clientConfig.baseURL || 'default');
  sendToRenderer('update-status', 'Ready (OpenAI SDK)');
  return true;
}
/**
 * Reset the conversation history, optionally seeding it with a system prompt.
 * @param {string|undefined} systemPrompt - Model instructions; when falsy the
 *   conversation simply starts empty.
 */
function setSystemPrompt(systemPrompt) {
  conversationMessages = systemPrompt
    ? [{ role: 'system', content: systemPrompt }]
    : [];
}
// Create WAV file from raw PCM data
/**
 * Wrap raw little-endian PCM samples in a canonical 44-byte RIFF/WAVE header.
 * @param {Buffer} pcmBuffer - Raw PCM sample data.
 * @param {number} [sampleRate=24000]
 * @param {number} [numChannels=1]
 * @param {number} [bitsPerSample=16]
 * @returns {Buffer} Complete WAV file contents (header + samples).
 */
function createWavBuffer(pcmBuffer, sampleRate = 24000, numChannels = 1, bitsPerSample = 16) {
  const HEADER_SIZE = 44;
  const bytesPerSample = bitsPerSample / 8;
  const blockAlign = numChannels * bytesPerSample;
  const byteRate = sampleRate * blockAlign;
  const dataSize = pcmBuffer.length;

  const wav = Buffer.alloc(HEADER_SIZE + dataSize);
  // RIFF chunk descriptor; the size field excludes the 8-byte RIFF preamble.
  wav.write('RIFF', 0);
  wav.writeUInt32LE(HEADER_SIZE + dataSize - 8, 4);
  wav.write('WAVE', 8);
  // "fmt " sub-chunk describing the PCM encoding.
  wav.write('fmt ', 12);
  wav.writeUInt32LE(16, 16); // fmt chunk size
  wav.writeUInt16LE(1, 20); // audio format (1 = PCM)
  wav.writeUInt16LE(numChannels, 22);
  wav.writeUInt32LE(sampleRate, 24);
  wav.writeUInt32LE(byteRate, 28);
  wav.writeUInt16LE(blockAlign, 32);
  wav.writeUInt16LE(bitsPerSample, 34);
  // "data" sub-chunk followed by the raw samples.
  wav.write('data', 36);
  wav.writeUInt32LE(dataSize, 40);
  pcmBuffer.copy(wav, 44);
  return wav;
}
/**
 * Transcribe raw PCM16 audio with the Whisper endpoint.
 * The SDK needs a file stream, so the PCM is wrapped in a WAV header and
 * written to a temp file which is deleted afterwards (best effort).
 * @param {Buffer|string} audioBuffer - Raw PCM16 samples, or base64 thereof.
 * @param {string} [mimeType] - Unused; kept for call-site compatibility.
 * @returns {Promise<string>} The transcription text.
 * @throws When the client is not initialized or the API call fails.
 */
async function transcribeAudio(audioBuffer, mimeType = 'audio/wav') {
  if (!openaiClient) {
    throw new Error('OpenAI client not initialized');
  }
  try {
    // Save audio buffer to temp file (OpenAI SDK requires file path)
    const tempDir = os.tmpdir();
    const tempFile = path.join(tempDir, `audio_${Date.now()}.wav`);
    // Convert base64 to buffer if needed
    let buffer = audioBuffer;
    if (typeof audioBuffer === 'string') {
      buffer = Buffer.from(audioBuffer, 'base64');
    }
    // Create proper WAV file with header
    const wavBuffer = createWavBuffer(buffer, SAMPLE_RATE, 1, 16);
    fs.writeFileSync(tempFile, wavBuffer);
    const transcription = await openaiClient.audio.transcriptions.create({
      file: fs.createReadStream(tempFile),
      model: currentConfig.whisperModel || 'whisper-1',
      response_format: 'text',
    });
    // Clean up temp file
    try {
      fs.unlinkSync(tempFile);
    } catch (e) {
      // Ignore cleanup errors
    }
    return transcription;
  } catch (error) {
    console.error('Transcription error:', error);
    throw error;
  }
}
/**
 * Send a user text message through Chat Completions and stream the reply to
 * the renderer. `isProcessing` ensures only one request runs at a time.
 * @param {string} text - The user's message.
 * @returns {Promise<{success: boolean, text?: string, error?: string}>}
 */
async function sendTextMessage(text) {
  if (!openaiClient) {
    return { success: false, error: 'OpenAI client not initialized' };
  }
  if (isProcessing) {
    return { success: false, error: 'Already processing a request' };
  }
  isProcessing = true;
  try {
    // Add user message to conversation
    conversationMessages.push({
      role: 'user',
      content: text,
    });
    sendToRenderer('update-status', 'Thinking...');
    const stream = await openaiClient.chat.completions.create({
      model: currentConfig.model || 'gpt-4o',
      messages: conversationMessages,
      stream: true,
      max_tokens: 4096,
    });
    let fullResponse = '';
    let isFirst = true;
    // Stream deltas to the renderer as they arrive.
    for await (const chunk of stream) {
      const content = chunk.choices[0]?.delta?.content;
      if (content) {
        fullResponse += content;
        sendToRenderer(isFirst ? 'new-response' : 'update-response', fullResponse);
        isFirst = false;
      }
    }
    // Add assistant response to conversation
    conversationMessages.push({
      role: 'assistant',
      content: fullResponse,
    });
    sendToRenderer('update-status', 'Ready');
    isProcessing = false;
    return { success: true, text: fullResponse };
  } catch (error) {
    console.error('Chat completion error:', error);
    sendToRenderer('update-status', 'Error: ' + error.message);
    isProcessing = false;
    return { success: false, error: error.message };
  }
}
/**
 * Send a screenshot plus prompt to the vision-capable chat model and stream
 * the reply to the renderer. Only the text prompt (not the image itself) is
 * appended to the conversation history for follow-ups.
 * @param {string} base64Image - JPEG image, base64-encoded.
 * @param {string} prompt - Question about the image.
 * @returns {Promise<{success: boolean, text?: string, model?: string, error?: string}>}
 */
async function sendImageMessage(base64Image, prompt) {
  if (!openaiClient) {
    return { success: false, error: 'OpenAI client not initialized' };
  }
  if (isProcessing) {
    return { success: false, error: 'Already processing a request' };
  }
  isProcessing = true;
  try {
    sendToRenderer('update-status', 'Analyzing image...');
    const messages = [
      ...conversationMessages,
      {
        role: 'user',
        content: [
          { type: 'text', text: prompt },
          {
            type: 'image_url',
            image_url: {
              url: `data:image/jpeg;base64,${base64Image}`,
            },
          },
        ],
      },
    ];
    const stream = await openaiClient.chat.completions.create({
      model: currentConfig.visionModel || currentConfig.model || 'gpt-4o',
      messages: messages,
      stream: true,
      max_tokens: 4096,
    });
    let fullResponse = '';
    let isFirst = true;
    for await (const chunk of stream) {
      const content = chunk.choices[0]?.delta?.content;
      if (content) {
        fullResponse += content;
        sendToRenderer(isFirst ? 'new-response' : 'update-response', fullResponse);
        isFirst = false;
      }
    }
    // Add to conversation history (text only for follow-ups)
    conversationMessages.push({
      role: 'user',
      content: prompt,
    });
    conversationMessages.push({
      role: 'assistant',
      content: fullResponse,
    });
    sendToRenderer('update-status', 'Ready');
    isProcessing = false;
    return { success: true, text: fullResponse, model: currentConfig.visionModel || currentConfig.model };
  } catch (error) {
    console.error('Vision error:', error);
    sendToRenderer('update-status', 'Error: ' + error.message);
    isProcessing = false;
    return { success: false, error: error.message };
  }
}
// Process audio chunk and get response
// This accumulates audio and transcribes when silence is detected
let audioChunks = []; // raw PCM chunks queued for flushAudioAndTranscribe()
let lastAudioTime = 0; // timestamp of the most recently received chunk
const SILENCE_THRESHOLD_MS = 1500; // 1.5 seconds of silence
/**
 * Queue one base64 audio chunk for later transcription.
 * NOTE(review): despite the comment above, no silence detection happens
 * here — chunks only accumulate until flushAudioAndTranscribe() is called
 * explicitly. Confirm whether a timer-based flush was intended.
 * @param {string} base64Audio - Base64-encoded PCM audio.
 * @param {string} mimeType - Unused.
 * @returns {Promise<{success: boolean, buffering?: boolean, error?: string}>}
 */
async function processAudioChunk(base64Audio, mimeType) {
  if (!openaiClient) {
    return { success: false, error: 'OpenAI client not initialized' };
  }
  const now = Date.now();
  const buffer = Buffer.from(base64Audio, 'base64');
  // Add to audio buffer
  audioChunks.push(buffer);
  lastAudioTime = now;
  // Check for silence (no new audio for SILENCE_THRESHOLD_MS)
  // This is a simple approach - in production you'd want proper VAD
  return { success: true, buffering: true };
}
/**
 * Transcribe everything queued by processAudioChunk() and, when the result
 * is non-blank, forward it to the chat model.
 * @returns {Promise<{success: boolean, transcription?: string, response?: string, text?: string, error?: string}>}
 */
async function flushAudioAndTranscribe() {
  if (audioChunks.length === 0) {
    return { success: true, text: '' };
  }
  try {
    // Combine all audio chunks
    const combinedBuffer = Buffer.concat(audioChunks);
    audioChunks = [];
    // Transcribe
    const transcription = await transcribeAudio(combinedBuffer);
    if (transcription && transcription.trim()) {
      // Send to chat
      const response = await sendTextMessage(transcription);
      return {
        success: true,
        transcription: transcription,
        response: response.text,
      };
    }
    return { success: true, text: '' };
  } catch (error) {
    console.error('Flush audio error:', error);
    return { success: false, error: error.message };
  }
}
/**
 * Drop the conversation history and any queued audio, preserving only the
 * system prompt (when one was set) so the assistant's instructions survive.
 */
function clearConversation() {
  const preserved = conversationMessages.filter(m => m.role === 'system');
  conversationMessages = preserved.length > 0 ? [preserved[0]] : [];
  audioChunks = [];
}
/**
 * Tear down the SDK provider: stop macOS audio capture, drop the client and
 * all conversation/audio state, and notify the renderer.
 */
function closeOpenAISDK() {
  stopMacOSAudioCapture();
  openaiClient = null;
  currentConfig = null;
  conversationMessages = [];
  audioChunks = [];
  isProcessing = false;
  sendToRenderer('update-status', 'Disconnected');
}
// ============ macOS Audio Capture ============
/**
 * Best-effort kill of any stray SystemAudioDump processes left over from a
 * previous run. Always resolves after a short grace period, whether or not
 * anything was killed.
 * @returns {Promise<void>}
 */
async function killExistingSystemAudioDump() {
  const { exec } = require('child_process');
  await new Promise(resolve => {
    exec('pkill -f SystemAudioDump', () => {
      // pkill failing just means nothing was running — ignore the error.
      setTimeout(resolve, 100);
    });
  });
}
/**
 * Down-mix 16-bit interleaved stereo PCM to mono by keeping only the left
 * channel (the right channel is discarded, not averaged).
 * @param {Buffer} stereoBuffer - Interleaved L/R 16-bit samples (4 bytes/frame).
 * @returns {Buffer} Mono 16-bit samples (2 bytes/frame).
 */
function convertStereoToMono(stereoBuffer) {
  const frameCount = stereoBuffer.length / 4;
  const mono = Buffer.alloc(frameCount * 2);
  for (let frame = 0; frame < frameCount; frame++) {
    mono.writeInt16LE(stereoBuffer.readInt16LE(frame * 4), frame * 2);
  }
  return mono;
}
// Calculate RMS (Root Mean Square) volume level of audio buffer
/**
 * Compute the RMS amplitude of 16-bit little-endian PCM samples.
 * @param {Buffer} buffer - PCM16 sample data.
 * @returns {number} RMS amplitude; 0 for an empty buffer.
 */
function calculateRMS(buffer) {
  const sampleCount = buffer.length / 2;
  if (sampleCount === 0) return 0;
  let energy = 0;
  for (let i = 0; i < sampleCount; i++) {
    const sample = buffer.readInt16LE(i * 2);
    energy += sample * sample;
  }
  return Math.sqrt(energy / sampleCount);
}
// Check if audio contains speech (simple VAD based on volume threshold)
/**
 * Crude voice-activity detection: the buffer counts as speech when its RMS
 * amplitude exceeds the threshold.
 * @param {Buffer} buffer - PCM16 sample data.
 * @param {number} [threshold=500] - RMS level above which audio is "speech".
 * @returns {boolean}
 */
function hasSpeech(buffer, threshold = 500) {
  return calculateRMS(buffer) > threshold;
}
/**
 * Periodic timer callback: transcribe the mono PCM accumulated from
 * SystemAudioDump. Skips when a request is already running, when the clip
 * is shorter than MIN_AUDIO_DURATION_MS, or when the simple VAD says the
 * buffer is silence; otherwise transcribes and forwards the text to chat.
 */
async function transcribeBufferedAudio() {
  if (audioBuffer.length === 0 || isProcessing) {
    return;
  }
  // Calculate audio duration
  const bytesPerSample = 2;
  const audioDurationMs = (audioBuffer.length / bytesPerSample / SAMPLE_RATE) * 1000;
  if (audioDurationMs < MIN_AUDIO_DURATION_MS) {
    return; // Not enough audio
  }
  // Check if there's actual speech in the audio (Voice Activity Detection)
  if (!hasSpeech(audioBuffer)) {
    // Clear buffer if it's just silence/noise
    audioBuffer = Buffer.alloc(0);
    return;
  }
  // Take current buffer and reset
  const currentBuffer = audioBuffer;
  audioBuffer = Buffer.alloc(0);
  try {
    console.log(`Transcribing ${audioDurationMs.toFixed(0)}ms of audio...`);
    sendToRenderer('update-status', 'Transcribing...');
    const transcription = await transcribeAudio(currentBuffer, 'audio/wav');
    // Ignore trivially short transcriptions (likely noise artifacts).
    if (transcription && transcription.trim() && transcription.trim().length > 2) {
      console.log('Transcription:', transcription);
      sendToRenderer('update-status', 'Processing...');
      // Send to chat
      await sendTextMessage(transcription);
    }
    sendToRenderer('update-status', 'Listening...');
  } catch (error) {
    console.error('Transcription error:', error);
    sendToRenderer('update-status', 'Listening...');
  }
}
/**
 * Spawn the bundled SystemAudioDump helper to capture macOS system audio and
 * feed it (as mono PCM16) into `audioBuffer` for periodic transcription.
 * No-op on non-macOS platforms.
 * @returns {Promise<boolean>} true when capture started successfully.
 */
async function startMacOSAudioCapture() {
  if (process.platform !== 'darwin') return false;
  // Kill any existing SystemAudioDump processes first
  await killExistingSystemAudioDump();
  console.log('=== Starting macOS audio capture (OpenAI SDK) ===');
  sendToRenderer('update-status', 'Starting audio capture...');
  const { app } = require('electron');
  const fs = require('fs');
  // The helper binary ships in resources when packaged, in assets in dev.
  let systemAudioPath;
  if (app.isPackaged) {
    systemAudioPath = path.join(process.resourcesPath, 'SystemAudioDump');
  } else {
    systemAudioPath = path.join(__dirname, '../assets', 'SystemAudioDump');
  }
  console.log('SystemAudioDump config:', {
    path: systemAudioPath,
    isPackaged: app.isPackaged,
    resourcesPath: process.resourcesPath,
    exists: fs.existsSync(systemAudioPath),
  });
  // Check if file exists
  if (!fs.existsSync(systemAudioPath)) {
    console.error('FATAL: SystemAudioDump not found at:', systemAudioPath);
    sendToRenderer('update-status', 'Error: Audio binary not found');
    return false;
  }
  // Check and fix executable permissions
  try {
    fs.accessSync(systemAudioPath, fs.constants.X_OK);
    console.log('SystemAudioDump is executable');
  } catch (err) {
    console.warn('SystemAudioDump not executable, fixing permissions...');
    try {
      fs.chmodSync(systemAudioPath, 0o755);
      console.log('Fixed executable permissions');
    } catch (chmodErr) {
      console.error('Failed to fix permissions:', chmodErr);
      sendToRenderer('update-status', 'Error: Cannot execute audio binary');
      return false;
    }
  }
  const spawnOptions = {
    stdio: ['ignore', 'pipe', 'pipe'],
    env: {
      ...process.env,
    },
  };
  console.log('Spawning SystemAudioDump...');
  systemAudioProc = spawn(systemAudioPath, [], spawnOptions);
  if (!systemAudioProc.pid) {
    console.error('FATAL: Failed to start SystemAudioDump - no PID');
    sendToRenderer('update-status', 'Error: Audio capture failed to start');
    return false;
  }
  console.log('SystemAudioDump started with PID:', systemAudioProc.pid);
  // Stdout is carved into fixed-size chunks (100 ms of interleaved stereo
  // PCM16 at SAMPLE_RATE) before stereo→mono conversion.
  const CHUNK_DURATION = 0.1;
  const BYTES_PER_SAMPLE = 2;
  const CHANNELS = 2;
  const CHUNK_SIZE = SAMPLE_RATE * BYTES_PER_SAMPLE * CHANNELS * CHUNK_DURATION;
  let tempBuffer = Buffer.alloc(0);
  let chunkCount = 0;
  let firstDataReceived = false;
  systemAudioProc.stdout.on('data', data => {
    if (!firstDataReceived) {
      firstDataReceived = true;
      console.log('First audio data received! Size:', data.length);
      sendToRenderer('update-status', 'Listening...');
    }
    tempBuffer = Buffer.concat([tempBuffer, data]);
    while (tempBuffer.length >= CHUNK_SIZE) {
      const chunk = tempBuffer.slice(0, CHUNK_SIZE);
      tempBuffer = tempBuffer.slice(CHUNK_SIZE);
      // Convert stereo to mono
      const monoChunk = CHANNELS === 2 ? convertStereoToMono(chunk) : chunk;
      // Add to audio buffer for transcription
      audioBuffer = Buffer.concat([audioBuffer, monoChunk]);
      chunkCount++;
      if (chunkCount % 100 === 0) {
        console.log(`Audio: ${chunkCount} chunks processed, buffer size: ${audioBuffer.length}`);
      }
    }
    // Limit buffer size (max 30 seconds of audio)
    const maxBufferSize = SAMPLE_RATE * BYTES_PER_SAMPLE * 30;
    if (audioBuffer.length > maxBufferSize) {
      audioBuffer = audioBuffer.slice(-maxBufferSize);
    }
  });
  systemAudioProc.stderr.on('data', data => {
    const msg = data.toString();
    console.error('SystemAudioDump stderr:', msg);
    if (msg.toLowerCase().includes('error')) {
      sendToRenderer('update-status', 'Audio error: ' + msg.substring(0, 50));
    }
  });
  systemAudioProc.on('close', (code, signal) => {
    console.log('SystemAudioDump closed:', { code, signal, chunksProcessed: chunkCount, tempBufferSize: tempBuffer.length });
    if (code !== 0 && code !== null) {
      sendToRenderer('update-status', `Audio stopped (exit: ${code}, signal: ${signal})`);
    }
    systemAudioProc = null;
    stopTranscriptionTimer();
  });
  systemAudioProc.on('error', err => {
    console.error('SystemAudioDump spawn error:', err.message, err.stack);
    sendToRenderer('update-status', 'Audio error: ' + err.message);
    systemAudioProc = null;
    stopTranscriptionTimer();
  });
  systemAudioProc.on('exit', (code, signal) => {
    console.log('SystemAudioDump exit event:', { code, signal });
  });
  // Start periodic transcription
  startTranscriptionTimer();
  sendToRenderer('update-status', 'Listening...');
  return true;
}
/** (Re)start the periodic transcription timer, replacing any existing one. */
function startTranscriptionTimer() {
  stopTranscriptionTimer();
  transcriptionTimer = setInterval(transcribeBufferedAudio, TRANSCRIPTION_INTERVAL_MS);
}
/** Cancel the periodic transcription timer if one is running. */
function stopTranscriptionTimer() {
  if (transcriptionTimer !== null) {
    clearInterval(transcriptionTimer);
    transcriptionTimer = null;
  }
}
/**
 * Stop audio capture: cancel the transcription timer, terminate the
 * SystemAudioDump child process (SIGTERM), and discard buffered audio.
 */
function stopMacOSAudioCapture() {
  stopTranscriptionTimer();
  if (systemAudioProc) {
    console.log('Stopping SystemAudioDump for OpenAI SDK...');
    systemAudioProc.kill('SIGTERM');
    systemAudioProc = null;
  }
  audioBuffer = Buffer.alloc(0);
}
// Public API of the OpenAI SDK (Chat Completions + Whisper) provider.
module.exports = {
  initializeOpenAISDK,
  setSystemPrompt,
  transcribeAudio,
  sendTextMessage,
  sendImageMessage,
  processAudioChunk,
  flushAudioAndTranscribe,
  clearConversation,
  closeOpenAISDK,
  startMacOSAudioCapture,
  stopMacOSAudioCapture,
};

View File

@ -1,21 +1,45 @@
const profilePrompts = { const responseModeFormats = {
interview: { brief: `**RESPONSE FORMAT REQUIREMENTS:**
intro: `You are an AI-powered interview assistant, designed to act as a discreet on-screen teleprompter. Your mission is to help the user excel in their job interview by providing concise, impactful, and ready-to-speak answers or key talking points. Analyze the ongoing interview dialogue and, crucially, the 'User-provided context' below.`,
formatRequirements: `**RESPONSE FORMAT REQUIREMENTS:**
- Keep responses SHORT and CONCISE (1-3 sentences max) - Keep responses SHORT and CONCISE (1-3 sentences max)
- Use **markdown formatting** for better readability - Use **markdown formatting** for better readability
- Use **bold** for key points and emphasis - Use **bold** for key points and emphasis
- Use bullet points (-) for lists when appropriate - Use bullet points (-) for lists when appropriate
- Focus on the most essential information only`, - Focus on the most essential information only
- EXCEPTION: If a coding/algorithm task is detected, ALWAYS provide the complete working code (see CODING TASKS below)`,
searchUsage: `**SEARCH TOOL USAGE:** detailed: `**RESPONSE FORMAT REQUIREMENTS:**
- Provide a THOROUGH and COMPREHENSIVE response with full explanations
- Use **markdown formatting** for better readability
- Use **bold** for key points and emphasis
- Use headers (##) to organize sections when appropriate
- Use bullet points (-) for lists when appropriate
- Include relevant context, edge cases, and reasoning
- For technical topics, explain the "why" behind each point
- No length restriction be as detailed as needed to fully answer the question`,
};
const codingAwareness = `**CODING TASKS — CRITICAL INSTRUCTION:**
When the interviewer/questioner asks to solve a coding problem, implement an algorithm, debug code, do a live coding exercise, open an IDE and write code, or any task that requires a code solution:
- You MUST provide the ACTUAL COMPLETE WORKING CODE SOLUTION
- NEVER respond with meta-advice like "now you should write code" or "prepare to implement" or "think about the approach"
- NEVER say "open your IDE" or "start coding" instead, GIVE THE CODE
- In brief mode: provide 2-3 bullet approach points, then the FULL working code with comments
- In detailed mode: explain approach, time/space complexity, edge cases, then the FULL working code with comments
- Include the programming language name in the code fence (e.g. \`\`\`python, \`\`\`javascript)
- If the language is not specified, default to Python
- The code must be complete, runnable, and correct`;
const profilePrompts = {
interview: {
intro: `You are an AI-powered interview assistant, designed to act as a discreet on-screen teleprompter. Your mission is to help the user excel in their job interview by providing concise, impactful, and ready-to-speak answers or key talking points. Analyze the ongoing interview dialogue and, crucially, the 'User-provided context' below.`,
searchUsage: `**SEARCH TOOL USAGE:**
- If the interviewer mentions **recent events, news, or current trends** (anything from the last 6 months), **ALWAYS use Google search** to get up-to-date information - If the interviewer mentions **recent events, news, or current trends** (anything from the last 6 months), **ALWAYS use Google search** to get up-to-date information
- If they ask about **company-specific information, recent acquisitions, funding, or leadership changes**, use Google search first - If they ask about **company-specific information, recent acquisitions, funding, or leadership changes**, use Google search first
- If they mention **new technologies, frameworks, or industry developments**, search for the latest information - If they mention **new technologies, frameworks, or industry developments**, search for the latest information
- After searching, provide a **concise, informed response** based on the real-time data`, - After searching, provide a **concise, informed response** based on the real-time data`,
content: `Focus on delivering the most essential information the user needs. Your suggestions should be direct and immediately usable. content: `Focus on delivering the most essential information the user needs. Your suggestions should be direct and immediately usable.
To help the user 'crack' the interview in their specific field: To help the user 'crack' the interview in their specific field:
1. Heavily rely on the 'User-provided context' (e.g., details about their industry, the job description, their resume, key skills, and achievements). 1. Heavily rely on the 'User-provided context' (e.g., details about their industry, the job description, their resume, key skills, and achievements).
@ -32,27 +56,20 @@ You: "I've been working with React for 4 years, building everything from simple
Interviewer: "Why do you want to work here?" Interviewer: "Why do you want to work here?"
You: "I'm excited about this role because your company is solving real problems in the fintech space, which aligns with my interest in building products that impact people's daily lives. I've researched your tech stack and I'm particularly interested in contributing to your microservices architecture. Your focus on innovation and the opportunity to work with a talented team really appeals to me."`, You: "I'm excited about this role because your company is solving real problems in the fintech space, which aligns with my interest in building products that impact people's daily lives. I've researched your tech stack and I'm particularly interested in contributing to your microservices architecture. Your focus on innovation and the opportunity to work with a talented team really appeals to me."`,
outputInstructions: `**OUTPUT INSTRUCTIONS:** outputInstructions: `**OUTPUT INSTRUCTIONS:**
Provide only the exact words to say in **markdown format**. No coaching, no "you should" statements, no explanations - just the direct response the candidate can speak immediately. Keep it **short and impactful**.`, Provide only the exact words to say in **markdown format**. No coaching, no "you should" statements, no explanations - just the direct response the candidate can speak immediately. Keep it **short and impactful**.`,
}, },
sales: { sales: {
intro: `You are a sales call assistant. Your job is to provide the exact words the salesperson should say to prospects during sales calls. Give direct, ready-to-speak responses that are persuasive and professional.`, intro: `You are a sales call assistant. Your job is to provide the exact words the salesperson should say to prospects during sales calls. Give direct, ready-to-speak responses that are persuasive and professional.`,
formatRequirements: `**RESPONSE FORMAT REQUIREMENTS:** searchUsage: `**SEARCH TOOL USAGE:**
- Keep responses SHORT and CONCISE (1-3 sentences max)
- Use **markdown formatting** for better readability
- Use **bold** for key points and emphasis
- Use bullet points (-) for lists when appropriate
- Focus on the most essential information only`,
searchUsage: `**SEARCH TOOL USAGE:**
- If the prospect mentions **recent industry trends, market changes, or current events**, **ALWAYS use Google search** to get up-to-date information - If the prospect mentions **recent industry trends, market changes, or current events**, **ALWAYS use Google search** to get up-to-date information
- If they reference **competitor information, recent funding news, or market data**, search for the latest information first - If they reference **competitor information, recent funding news, or market data**, search for the latest information first
- If they ask about **new regulations, industry reports, or recent developments**, use search to provide accurate data - If they ask about **new regulations, industry reports, or recent developments**, use search to provide accurate data
- After searching, provide a **concise, informed response** that demonstrates current market knowledge`, - After searching, provide a **concise, informed response** that demonstrates current market knowledge`,
content: `Examples: content: `Examples:
Prospect: "Tell me about your product" Prospect: "Tell me about your product"
You: "Our platform helps companies like yours reduce operational costs by 30% while improving efficiency. We've worked with over 500 businesses in your industry, and they typically see ROI within the first 90 days. What specific operational challenges are you facing right now?" You: "Our platform helps companies like yours reduce operational costs by 30% while improving efficiency. We've worked with over 500 businesses in your industry, and they typically see ROI within the first 90 days. What specific operational challenges are you facing right now?"
@ -63,27 +80,20 @@ You: "Three key differentiators set us apart: First, our implementation takes ju
Prospect: "I need to think about it" Prospect: "I need to think about it"
You: "I completely understand this is an important decision. What specific concerns can I address for you today? Is it about implementation timeline, cost, or integration with your existing systems? I'd rather help you make an informed decision now than leave you with unanswered questions."`, You: "I completely understand this is an important decision. What specific concerns can I address for you today? Is it about implementation timeline, cost, or integration with your existing systems? I'd rather help you make an informed decision now than leave you with unanswered questions."`,
outputInstructions: `**OUTPUT INSTRUCTIONS:** outputInstructions: `**OUTPUT INSTRUCTIONS:**
Provide only the exact words to say in **markdown format**. Be persuasive but not pushy. Focus on value and addressing objections directly. Keep responses **short and impactful**.`, Provide only the exact words to say in **markdown format**. Be persuasive but not pushy. Focus on value and addressing objections directly. Keep responses **short and impactful**.`,
}, },
meeting: { meeting: {
intro: `You are a meeting assistant. Your job is to provide the exact words to say during professional meetings, presentations, and discussions. Give direct, ready-to-speak responses that are clear and professional.`, intro: `You are a meeting assistant. Your job is to provide the exact words to say during professional meetings, presentations, and discussions. Give direct, ready-to-speak responses that are clear and professional.`,
formatRequirements: `**RESPONSE FORMAT REQUIREMENTS:** searchUsage: `**SEARCH TOOL USAGE:**
- Keep responses SHORT and CONCISE (1-3 sentences max)
- Use **markdown formatting** for better readability
- Use **bold** for key points and emphasis
- Use bullet points (-) for lists when appropriate
- Focus on the most essential information only`,
searchUsage: `**SEARCH TOOL USAGE:**
- If participants mention **recent industry news, regulatory changes, or market updates**, **ALWAYS use Google search** for current information - If participants mention **recent industry news, regulatory changes, or market updates**, **ALWAYS use Google search** for current information
- If they reference **competitor activities, recent reports, or current statistics**, search for the latest data first - If they reference **competitor activities, recent reports, or current statistics**, search for the latest data first
- If they discuss **new technologies, tools, or industry developments**, use search to provide accurate insights - If they discuss **new technologies, tools, or industry developments**, use search to provide accurate insights
- After searching, provide a **concise, informed response** that adds value to the discussion`, - After searching, provide a **concise, informed response** that adds value to the discussion`,
content: `Examples: content: `Examples:
Participant: "What's the status on the project?" Participant: "What's the status on the project?"
You: "We're currently on track to meet our deadline. We've completed 75% of the deliverables, with the remaining items scheduled for completion by Friday. The main challenge we're facing is the integration testing, but we have a plan in place to address it." You: "We're currently on track to meet our deadline. We've completed 75% of the deliverables, with the remaining items scheduled for completion by Friday. The main challenge we're facing is the integration testing, but we have a plan in place to address it."
@ -94,27 +104,20 @@ You: "Absolutely. We're currently at 80% of our allocated budget with 20% of the
Participant: "What are the next steps?" Participant: "What are the next steps?"
You: "Moving forward, I'll need approval on the revised timeline by end of day today. Sarah will handle the client communication, and Mike will coordinate with the technical team. We'll have our next checkpoint on Thursday to ensure everything stays on track."`, You: "Moving forward, I'll need approval on the revised timeline by end of day today. Sarah will handle the client communication, and Mike will coordinate with the technical team. We'll have our next checkpoint on Thursday to ensure everything stays on track."`,
outputInstructions: `**OUTPUT INSTRUCTIONS:** outputInstructions: `**OUTPUT INSTRUCTIONS:**
Provide only the exact words to say in **markdown format**. Be clear, concise, and action-oriented in your responses. Keep it **short and impactful**.`, Provide only the exact words to say in **markdown format**. Be clear, concise, and action-oriented in your responses. Keep it **short and impactful**.`,
}, },
presentation: { presentation: {
intro: `You are a presentation coach. Your job is to provide the exact words the presenter should say during presentations, pitches, and public speaking events. Give direct, ready-to-speak responses that are engaging and confident.`, intro: `You are a presentation coach. Your job is to provide the exact words the presenter should say during presentations, pitches, and public speaking events. Give direct, ready-to-speak responses that are engaging and confident.`,
formatRequirements: `**RESPONSE FORMAT REQUIREMENTS:** searchUsage: `**SEARCH TOOL USAGE:**
- Keep responses SHORT and CONCISE (1-3 sentences max)
- Use **markdown formatting** for better readability
- Use **bold** for key points and emphasis
- Use bullet points (-) for lists when appropriate
- Focus on the most essential information only`,
searchUsage: `**SEARCH TOOL USAGE:**
- If the audience asks about **recent market trends, current statistics, or latest industry data**, **ALWAYS use Google search** for up-to-date information - If the audience asks about **recent market trends, current statistics, or latest industry data**, **ALWAYS use Google search** for up-to-date information
- If they reference **recent events, new competitors, or current market conditions**, search for the latest information first - If they reference **recent events, new competitors, or current market conditions**, search for the latest information first
- If they inquire about **recent studies, reports, or breaking news** in your field, use search to provide accurate data - If they inquire about **recent studies, reports, or breaking news** in your field, use search to provide accurate data
- After searching, provide a **concise, credible response** with current facts and figures`, - After searching, provide a **concise, credible response** with current facts and figures`,
content: `Examples: content: `Examples:
Audience: "Can you explain that slide again?" Audience: "Can you explain that slide again?"
You: "Of course. This slide shows our three-year growth trajectory. The blue line represents revenue, which has grown 150% year over year. The orange bars show our customer acquisition, doubling each year. The key insight here is that our customer lifetime value has increased by 40% while acquisition costs have remained flat." You: "Of course. This slide shows our three-year growth trajectory. The blue line represents revenue, which has grown 150% year over year. The orange bars show our customer acquisition, doubling each year. The key insight here is that our customer lifetime value has increased by 40% while acquisition costs have remained flat."
@ -125,27 +128,20 @@ You: "Great question. Our competitive advantage comes down to three core strengt
Audience: "How do you plan to scale?" Audience: "How do you plan to scale?"
You: "Our scaling strategy focuses on three pillars. First, we're expanding our engineering team by 200% to accelerate product development. Second, we're entering three new markets next quarter. Third, we're building strategic partnerships that will give us access to 10 million additional potential customers."`, You: "Our scaling strategy focuses on three pillars. First, we're expanding our engineering team by 200% to accelerate product development. Second, we're entering three new markets next quarter. Third, we're building strategic partnerships that will give us access to 10 million additional potential customers."`,
outputInstructions: `**OUTPUT INSTRUCTIONS:** outputInstructions: `**OUTPUT INSTRUCTIONS:**
Provide only the exact words to say in **markdown format**. Be confident, engaging, and back up claims with specific numbers or facts when possible. Keep responses **short and impactful**.`, Provide only the exact words to say in **markdown format**. Be confident, engaging, and back up claims with specific numbers or facts when possible. Keep responses **short and impactful**.`,
}, },
negotiation: { negotiation: {
intro: `You are a negotiation assistant. Your job is to provide the exact words to say during business negotiations, contract discussions, and deal-making conversations. Give direct, ready-to-speak responses that are strategic and professional.`, intro: `You are a negotiation assistant. Your job is to provide the exact words to say during business negotiations, contract discussions, and deal-making conversations. Give direct, ready-to-speak responses that are strategic and professional.`,
formatRequirements: `**RESPONSE FORMAT REQUIREMENTS:** searchUsage: `**SEARCH TOOL USAGE:**
- Keep responses SHORT and CONCISE (1-3 sentences max)
- Use **markdown formatting** for better readability
- Use **bold** for key points and emphasis
- Use bullet points (-) for lists when appropriate
- Focus on the most essential information only`,
searchUsage: `**SEARCH TOOL USAGE:**
- If they mention **recent market pricing, current industry standards, or competitor offers**, **ALWAYS use Google search** for current benchmarks - If they mention **recent market pricing, current industry standards, or competitor offers**, **ALWAYS use Google search** for current benchmarks
- If they reference **recent legal changes, new regulations, or market conditions**, search for the latest information first - If they reference **recent legal changes, new regulations, or market conditions**, search for the latest information first
- If they discuss **recent company news, financial performance, or industry developments**, use search to provide informed responses - If they discuss **recent company news, financial performance, or industry developments**, use search to provide informed responses
- After searching, provide a **strategic, well-informed response** that leverages current market intelligence`, - After searching, provide a **strategic, well-informed response** that leverages current market intelligence`,
content: `Examples: content: `Examples:
Other party: "That price is too high" Other party: "That price is too high"
You: "I understand your concern about the investment. Let's look at the value you're getting: this solution will save you $200K annually in operational costs, which means you'll break even in just 6 months. Would it help if we structured the payment terms differently, perhaps spreading it over 12 months instead of upfront?" You: "I understand your concern about the investment. Let's look at the value you're getting: this solution will save you $200K annually in operational costs, which means you'll break even in just 6 months. Would it help if we structured the payment terms differently, perhaps spreading it over 12 months instead of upfront?"
@ -156,27 +152,20 @@ You: "I appreciate your directness. We want this to work for both parties. Our c
Other party: "We're considering other options" Other party: "We're considering other options"
You: "That's smart business practice. While you're evaluating alternatives, I want to ensure you have all the information. Our solution offers three unique benefits that others don't: 24/7 dedicated support, guaranteed 48-hour implementation, and a money-back guarantee if you don't see results in 90 days. How important are these factors in your decision?"`, You: "That's smart business practice. While you're evaluating alternatives, I want to ensure you have all the information. Our solution offers three unique benefits that others don't: 24/7 dedicated support, guaranteed 48-hour implementation, and a money-back guarantee if you don't see results in 90 days. How important are these factors in your decision?"`,
outputInstructions: `**OUTPUT INSTRUCTIONS:** outputInstructions: `**OUTPUT INSTRUCTIONS:**
Provide only the exact words to say in **markdown format**. Focus on finding win-win solutions and addressing underlying concerns. Keep responses **short and impactful**.`, Provide only the exact words to say in **markdown format**. Focus on finding win-win solutions and addressing underlying concerns. Keep responses **short and impactful**.`,
}, },
exam: { exam: {
intro: `You are an exam assistant designed to help students pass tests efficiently. Your role is to provide direct, accurate answers to exam questions with minimal explanation - just enough to confirm the answer is correct.`, intro: `You are an exam assistant designed to help students pass tests efficiently. Your role is to provide direct, accurate answers to exam questions with minimal explanation - just enough to confirm the answer is correct.`,
formatRequirements: `**RESPONSE FORMAT REQUIREMENTS:** searchUsage: `**SEARCH TOOL USAGE:**
- Keep responses SHORT and CONCISE (1-2 sentences max)
- Use **markdown formatting** for better readability
- Use **bold** for the answer choice/result
- Focus on the most essential information only
- Provide only brief justification for correctness`,
searchUsage: `**SEARCH TOOL USAGE:**
- If the question involves **recent information, current events, or updated facts**, **ALWAYS use Google search** for the latest data - If the question involves **recent information, current events, or updated facts**, **ALWAYS use Google search** for the latest data
- If they reference **specific dates, statistics, or factual information** that might be outdated, search for current information - If they reference **specific dates, statistics, or factual information** that might be outdated, search for current information
- If they ask about **recent research, new theories, or updated methodologies**, search for the latest information - If they ask about **recent research, new theories, or updated methodologies**, search for the latest information
- After searching, provide **direct, accurate answers** with minimal explanation`, - After searching, provide **direct, accurate answers** with minimal explanation`,
content: `Focus on providing efficient exam assistance that helps students pass tests quickly. content: `Focus on providing efficient exam assistance that helps students pass tests quickly.
**Key Principles:** **Key Principles:**
1. **Answer the question directly** - no unnecessary explanations 1. **Answer the question directly** - no unnecessary explanations
@ -196,83 +185,62 @@ You: "**Question**: Which of the following is a primary color? A) Green B) Red C
Question: "Solve for x: 2x + 5 = 13" Question: "Solve for x: 2x + 5 = 13"
You: "**Question**: Solve for x: 2x + 5 = 13 **Answer**: x = 4 **Why**: Subtract 5 from both sides: 2x = 8, then divide by 2: x = 4."`, You: "**Question**: Solve for x: 2x + 5 = 13 **Answer**: x = 4 **Why**: Subtract 5 from both sides: 2x = 8, then divide by 2: x = 4."`,
outputInstructions: `**OUTPUT INSTRUCTIONS:** outputInstructions: `**OUTPUT INSTRUCTIONS:**
Provide direct exam answers in **markdown format**. Include the question text, the correct answer choice, and a brief justification. Focus on efficiency and accuracy. Keep responses **short and to the point**.`, Provide direct exam answers in **markdown format**. Include the question text, the correct answer choice, and a brief justification. Focus on efficiency and accuracy. Keep responses **short and to the point**.`,
}, },
}; };
function buildSystemPrompt(promptParts, customPrompt = '', googleSearchEnabled = true) { function buildSystemPrompt(
const sections = [promptParts.intro, '\n\n', promptParts.formatRequirements]; promptParts,
customPrompt = "",
googleSearchEnabled = true,
responseMode = "brief",
) {
const formatReqs =
responseModeFormats[responseMode] || responseModeFormats.brief;
const sections = [
promptParts.intro,
"\n\n",
formatReqs,
"\n\n",
codingAwareness,
];
// Only add search usage section if Google Search is enabled // Only add search usage section if Google Search is enabled
if (googleSearchEnabled) { if (googleSearchEnabled) {
sections.push('\n\n', promptParts.searchUsage); sections.push("\n\n", promptParts.searchUsage);
} }
sections.push('\n\n', promptParts.content, '\n\nUser-provided context\n-----\n', customPrompt, '\n-----\n\n', promptParts.outputInstructions); sections.push(
"\n\n",
promptParts.content,
"\n\nUser-provided context\n-----\n",
customPrompt,
"\n-----\n\n",
promptParts.outputInstructions,
);
return sections.join(''); return sections.join("");
} }
function getSystemPrompt(profile, customPrompt = '', googleSearchEnabled = true) { function getSystemPrompt(
const promptParts = profilePrompts[profile] || profilePrompts.interview; profile,
return buildSystemPrompt(promptParts, customPrompt, googleSearchEnabled); customPrompt = "",
googleSearchEnabled = true,
responseMode = "brief",
) {
const promptParts = profilePrompts[profile] || profilePrompts.interview;
return buildSystemPrompt(
promptParts,
customPrompt,
googleSearchEnabled,
responseMode,
);
} }
// Comprehensive prompt for Vision/Image analysis
const VISION_ANALYSIS_PROMPT = `You are an expert AI assistant analyzing a screenshot. Your task is to understand what the user needs help with and provide the most useful response.
**ANALYSIS APPROACH:**
1. First, identify what's shown on the screen (code editor, math problem, website, document, exam, etc.)
2. Determine what the user likely needs (explanation, solution, answer, debugging help, etc.)
3. Provide a direct, actionable response
**RESPONSE GUIDELINES BY CONTEXT:**
**If it's CODE (LeetCode, HackerRank, coding interview, IDE):**
- Identify the programming language and problem type
- Provide a brief explanation of the approach (2-3 bullet points max)
- Give the complete, working code solution
- Include time/space complexity if relevant
- If there's an error, explain the fix
**If it's MATH or SCIENCE:**
- Show step-by-step solution
- Use proper mathematical notation with LaTeX ($..$ for inline, $$...$$ for blocks)
- Provide the final answer clearly marked
- Include any relevant formulas used
**If it's MCQ/EXAM/QUIZ:**
- State the correct answer immediately and clearly (e.g., "**Answer: B**")
- Provide brief justification (1-2 sentences)
- If multiple questions visible, answer all of them
**If it's a DOCUMENT/ARTICLE/WEBSITE:**
- Summarize the key information
- Answer any specific questions if apparent
- Highlight important points
**If it's a FORM/APPLICATION:**
- Help fill in the required information
- Suggest appropriate responses
- Point out any issues or missing fields
**If it's an ERROR/DEBUG scenario:**
- Identify the error type and cause
- Provide the fix immediately
- Explain briefly why it occurred
**FORMAT REQUIREMENTS:**
- Use **markdown** for formatting
- Use **bold** for key answers and important points
- Use code blocks with language specification for code
- Be concise but complete - no unnecessary explanations
- No pleasantries or filler text - get straight to the answer
**CRITICAL:** Provide the complete answer. Don't ask for clarification - make reasonable assumptions and deliver value immediately.`;
module.exports = { module.exports = {
profilePrompts, profilePrompts,
getSystemPrompt, responseModeFormats,
VISION_ANALYSIS_PROMPT, codingAwareness,
getSystemPrompt,
}; };

File diff suppressed because it is too large Load Diff

332
src/utils/whisperWorker.js Normal file
View File

@ -0,0 +1,332 @@
/**
* Whisper Worker runs ONNX Runtime in an isolated child process.
*
* The main Electron process forks this file and communicates via IPC messages.
* If ONNX Runtime crashes (SIGSEGV/SIGABRT inside the native Metal or CPU
 * execution provider), only this worker dies — the main process survives and
* can respawn the worker automatically.
*
 * Protocol (parent ↔ worker):
 * parent → worker:
* { type: 'load', modelName, cacheDir, device? }
* { type: 'transcribe', audioBase64, language? } // PCM 16-bit 16kHz as base64
* { type: 'shutdown' }
*
 * worker → parent:
* { type: 'load-result', success, error?, device? }
* { type: 'transcribe-result', success, text?, error? }
* { type: 'status', message }
* { type: 'ready' }
*/
// ── Crash handlers — report fatal errors before the process dies ──
process.on("uncaughtException", (err) => {
  // Best-effort: tell the parent why the worker is dying before exiting.
  try {
    const message = `[Worker] Uncaught exception: ${err.message || err}`;
    send({ type: "status", message });
    console.error("[WhisperWorker] Uncaught exception:", err);
  } catch (_) {
    // IPC channel to the parent is already gone — nothing more we can do
  }
  process.exit(1);
});
process.on("unhandledRejection", (reason) => {
  // Report, but keep the process alive — the pipeline's own error
  // handlers are expected to surface the failure as a result message.
  try {
    const message = `[Worker] Unhandled rejection: ${reason?.message || reason}`;
    send({ type: "status", message });
    console.error("[WhisperWorker] Unhandled rejection:", reason);
  } catch (_) {
    // IPC channel to the parent is already gone — nothing more we can do
  }
});
let whisperPipeline = null;
/** Which ONNX backend is actually active: "cpu" | "wasm" */
let activeDevice = null;
/**
 * Convert little-endian 16-bit PCM bytes to normalized Float32 samples.
 *
 * @param {Buffer} pcm16Buffer Raw audio bytes (16-bit signed LE).
 * @returns {Float32Array} Samples in [-1, 1); empty when input is missing/empty.
 */
function pcm16ToFloat32(pcm16Buffer) {
  if (!pcm16Buffer || pcm16Buffer.length === 0) {
    return new Float32Array(0);
  }
  // A trailing odd byte cannot form a full 16-bit sample — drop it.
  const sampleCount = Math.floor(pcm16Buffer.length / 2);
  const out = new Float32Array(sampleCount);
  for (let i = 0; i < sampleCount; i++) {
    // Scale the int16 range [-32768, 32767] down to [-1, 1).
    out[i] = pcm16Buffer.readInt16LE(i * 2) / 32768;
  }
  return out;
}
/**
 * Load the Whisper model and report the outcome to the parent via IPC.
 *
 * Sends a `load-result` message on success or failure; never throws to the
 * caller. Emits `status` and `progress` messages while downloading/loading.
 *
 * @param {string} modelName HuggingFace model id, e.g. "Xenova/whisper-small"
 * @param {string} cacheDir Directory for cached model files
 * @param {string} [device] "cpu" (onnxruntime-node) or "wasm" (onnxruntime-web).
 *                          When "cpu" is requested we try native first and fall
 *                          back to "wasm" on failure (ABI mismatch, etc.).
 */
async function loadModel(modelName, cacheDir, device = "cpu") {
  // Idempotent: if a pipeline already exists, just re-report success with
  // the backend that is actually in use.
  if (whisperPipeline) {
    send({ type: "load-result", success: true, device: activeDevice });
    return;
  }
  try {
    send({
      type: "status",
      message: "Loading Whisper model (first time may take a while)...",
    });
    // Validate / create cache directory
    const fs = require("fs");
    const path = require("path");
    if (cacheDir) {
      try {
        if (!fs.existsSync(cacheDir)) {
          fs.mkdirSync(cacheDir, { recursive: true });
          console.log("[WhisperWorker] Created cache directory:", cacheDir);
        }
      } catch (mkdirErr) {
        // Non-fatal: loading may still succeed if the library resolves
        // the model elsewhere; just record the problem.
        console.warn(
          "[WhisperWorker] Cannot create cache dir:",
          mkdirErr.message,
        );
      }
      // Check for corrupted partial downloads — if an onnx file exists but
      // is suspiciously small (< 1 KB), delete it so the library re-downloads.
      try {
        const modelDir = path.join(cacheDir, modelName.replace("/", path.sep));
        if (fs.existsSync(modelDir)) {
          // Recursively scan the model's cache subtree for truncated .onnx files.
          const walk = (dir) => {
            for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
              const full = path.join(dir, entry.name);
              if (entry.isDirectory()) {
                walk(full);
              } else if (
                entry.name.endsWith(".onnx") &&
                fs.statSync(full).size < 1024
              ) {
                console.warn(
                  "[WhisperWorker] Removing likely-corrupt file:",
                  full,
                );
                fs.unlinkSync(full);
              }
            }
          };
          walk(modelDir);
        }
      } catch (cleanErr) {
        // Cleanup is best-effort; a failure here must not block loading.
        console.warn("[WhisperWorker] Cache cleanup error:", cleanErr.message);
      }
    }
    // Dynamic import: the dependency is pulled in only when a load is requested.
    const { pipeline, env } = await import("@huggingface/transformers");
    env.cacheDir = cacheDir;
    // Attempt to load with the requested device. "cpu" implicitly allows a
    // WASM fallback; an explicit "wasm" request tries only WASM.
    const devicesToTry = device === "wasm" ? ["wasm"] : ["cpu", "wasm"];
    let lastError = null;
    for (const dev of devicesToTry) {
      try {
        send({
          type: "status",
          message: `Loading Whisper (${dev} backend)...`,
        });
        console.log(
          `[WhisperWorker] Trying device: ${dev}, model: ${modelName}`,
        );
        whisperPipeline = await pipeline(
          "automatic-speech-recognition",
          modelName,
          {
            dtype: "q8",
            device: dev,
            // Forward download/initialization progress to the parent so it
            // can render a progress UI.
            progress_callback: (progress) => {
              // progress: { status, name?, file?, progress?, loaded?, total? }
              if (
                progress.status === "download" ||
                progress.status === "progress"
              ) {
                send({
                  type: "progress",
                  file: progress.file || progress.name || "",
                  progress: progress.progress ?? 0,
                  loaded: progress.loaded ?? 0,
                  total: progress.total ?? 0,
                  status: progress.status,
                });
              } else if (progress.status === "done") {
                send({
                  type: "progress",
                  file: progress.file || progress.name || "",
                  progress: 100,
                  loaded: progress.total ?? 0,
                  total: progress.total ?? 0,
                  status: "done",
                });
              } else if (progress.status === "initiate") {
                send({
                  type: "progress",
                  file: progress.file || progress.name || "",
                  progress: 0,
                  loaded: 0,
                  total: 0,
                  status: "initiate",
                });
              }
            },
          },
        );
        activeDevice = dev;
        console.log(
          `[WhisperWorker] Model loaded successfully (device: ${dev})`,
        );
        send({ type: "load-result", success: true, device: dev });
        return;
      } catch (err) {
        lastError = err;
        console.error(
          `[WhisperWorker] Failed to load with device "${dev}":`,
          err.message || err,
        );
        if (dev === "cpu" && devicesToTry.includes("wasm")) {
          send({
            type: "status",
            message: `Native CPU backend failed (${err.message}). Trying WASM fallback...`,
          });
        }
        // Reset pipeline state before retry
        whisperPipeline = null;
      }
    }
    // All devices failed
    throw lastError || new Error("All ONNX backends failed");
  } catch (error) {
    send({ type: "load-result", success: false, error: error.message });
  }
}
/**
 * Transcribe one chunk of audio and report the result to the parent.
 *
 * Sends a `transcribe-result` IPC message (success + text, or an error
 * string); never throws to the caller.
 *
 * @param {string} audioBase64 PCM 16-bit LE mono 16 kHz audio, base64-encoded.
 * @param {string} [language] Language hint; "auto" or falsy lets the model decide.
 */
async function transcribe(audioBase64, language) {
  if (!whisperPipeline) {
    send({
      type: "transcribe-result",
      success: false,
      error: "Whisper pipeline not loaded",
    });
    return;
  }
  try {
    const pcm16Buffer = Buffer.from(audioBase64, "base64");
    // Need at least one full 16-bit sample.
    if (pcm16Buffer.length < 2) {
      send({
        type: "transcribe-result",
        success: false,
        error: "Audio buffer too small",
      });
      return;
    }
    // Cap at ~30 seconds (16kHz, 16-bit mono)
    const maxBytes = 16000 * 2 * 30;
    // subarray() is a zero-copy view; Buffer#slice is deprecated in Node.
    const audioData =
      pcm16Buffer.length > maxBytes
        ? pcm16Buffer.subarray(0, maxBytes)
        : pcm16Buffer;
    const float32Audio = pcm16ToFloat32(audioData);
    if (float32Audio.length === 0) {
      send({
        type: "transcribe-result",
        success: false,
        error: "Empty audio after conversion",
      });
      return;
    }
    // Build pipeline options with the requested language
    const pipelineOpts = {
      sampling_rate: 16000,
      task: "transcribe",
    };
    if (language && language !== "auto") {
      pipelineOpts.language = language;
    }
    const result = await whisperPipeline(float32Audio, pipelineOpts);
    const text = result.text?.trim() || "";
    send({ type: "transcribe-result", success: true, text });
  } catch (error) {
    send({
      type: "transcribe-result",
      success: false,
      error: error.message || String(error),
    });
  }
}
/**
 * Best-effort IPC message to the parent process.
 * Silently no-ops when there is no IPC channel or the parent has gone away.
 *
 * @param {object} msg Message payload to forward to the parent.
 */
function send(msg) {
  try {
    // process.send only exists when this process was forked with an IPC channel.
    process.send?.(msg);
  } catch (_) {
    // Parent may have disconnected
  }
}
// IPC command dispatcher: the parent drives the worker entirely through
// "load" / "transcribe" / "shutdown" messages. Unknown types are ignored.
process.on("message", (msg) => {
  switch (msg.type) {
    case "load":
      // loadModel reports its own results; this catch covers only
      // unexpected rejections escaping it.
      loadModel(msg.modelName, msg.cacheDir, msg.device).catch((err) => {
        send({ type: "load-result", success: false, error: err.message });
      });
      break;
    case "transcribe":
      transcribe(msg.audioBase64, msg.language).catch((err) => {
        send({ type: "transcribe-result", success: false, error: err.message });
      });
      break;
    case "shutdown":
      // Dispose the ONNX session gracefully before exiting to avoid
      // native cleanup race conditions (SIGABRT on mutex destroy).
      (async () => {
        if (whisperPipeline) {
          try {
            if (typeof whisperPipeline.dispose === "function") {
              await whisperPipeline.dispose();
            }
          } catch (_) {
            // Best-effort cleanup
          }
          whisperPipeline = null;
        }
        // Small delay to let native threads wind down
        setTimeout(() => process.exit(0), 200);
      })();
      break;
  }
});
// Signal readiness to parent
send({ type: "ready" });

File diff suppressed because it is too large Load Diff