Improve recorder responsiveness and UI consistency: 1) add music playback control during recording; 2) make the recorder more responsive by removing animations; 3) improve UI consistency with a uniform button appearance; 4) optimize the sound and UI startup sequence for a better user experience
This commit is contained in:
parent
b5773b38a1
commit
9d38c7c8fa
219
VoiceInk/MediaController.swift
Normal file
219
VoiceInk/MediaController.swift
Normal file
@ -0,0 +1,219 @@
|
|||||||
|
import Foundation
|
||||||
|
import AppKit
|
||||||
|
import SwiftUI
|
||||||
|
import os
|
||||||
|
import Combine
|
||||||
|
|
||||||
|
/// Controls media playback detection and management during recording.
///
/// Wraps Apple's *private* MediaRemote framework (loaded at runtime via `dlopen`)
/// to detect whether any app is playing audio, pause it when a recording starts,
/// and resume it afterwards. Because the framework is private, every call site
/// falls back gracefully: if a symbol is missing, the class simulates the
/// hardware Play/Pause media key instead.
///
/// NOTE(review): private-framework symbols can change between macOS releases;
/// all usage here should be treated as best-effort.
class MediaController: ObservableObject {
    /// Shared singleton instance; `init` is private to enforce single ownership
    /// of the dlopen'd framework handle.
    static let shared = MediaController()
    /// Handle returned by `dlopen` for MediaRemote; closed in `deinit`.
    private var mediaRemoteHandle: UnsafeMutableRawPointer?
    /// C function pointer for `MRMediaRemoteGetNowPlayingApplicationIsPlaying`,
    /// resolved in `setupMediaRemote()`; `nil` when the framework failed to load.
    private var mrNowPlayingIsPlaying: MRNowPlayingIsPlayingFunc?
    /// True while *this controller* is the one that paused system media, so
    /// `resumeMediaIfPaused()` never resumes media the user paused themselves.
    private var didPauseMedia = false

    private let logger = Logger(subsystem: "com.prakashjoshipax.voiceink", category: "MediaController")

    /// User-facing toggle for the pause-media-during-recording feature.
    /// Backed by UserDefaults so the setting persists across launches.
    @Published var isMediaPauseEnabled: Bool = UserDefaults.standard.bool(forKey: "isMediaPauseEnabled") {
        didSet {
            UserDefaults.standard.set(isMediaPauseEnabled, forKey: "isMediaPauseEnabled")
        }
    }

    // Define function pointer types for MediaRemote functions.
    // The "is playing" call is asynchronous: it invokes the completion block
    // on the supplied queue with the current playback state.
    typealias MRNowPlayingIsPlayingFunc = @convention(c) (DispatchQueue, @escaping (Bool) -> Void) -> Void
    // NOTE(review): this typealias is declared but never used in this file.
    typealias MRMediaRemoteCommandInfoFunc = @convention(c) () -> Void

    // Additional function pointers for direct control.
    // Signature assumed to match `MRMediaRemoteSendCommand(command, userInfo)`
    // returning whether the command was accepted — TODO confirm against the
    // private framework headers for the deployed macOS versions.
    private var mrSendCommand: (@convention(c) (Int, [String: Any]?) -> Bool)?

    // MediaRemote command constants (values presumed from reverse-engineered
    // MRMediaRemoteCommand enum — verify if targeting new macOS releases).
    private let kMRPlay = 0
    private let kMRPause = 1
    private let kMRTogglePlayPause = 2

    /// Seeds the feature flag to `true` on first launch (UserDefaults `bool`
    /// would otherwise silently default to `false`), then loads MediaRemote.
    private init() {
        // Set default if not already set
        if !UserDefaults.standard.contains(key: "isMediaPauseEnabled") {
            UserDefaults.standard.set(true, forKey: "isMediaPauseEnabled")
        }
        setupMediaRemote()
    }

    /// Loads the private MediaRemote framework and resolves the two symbols
    /// this class uses. On any failure the handle is released and the class
    /// degrades to "feature unavailable" (`isMediaPlaying()` returns false).
    private func setupMediaRemote() {
        // Open the private framework
        guard let handle = dlopen("/System/Library/PrivateFrameworks/MediaRemote.framework/MediaRemote", RTLD_NOW) else {
            logger.error("Unable to open MediaRemote framework")
            return
        }
        mediaRemoteHandle = handle

        // Get pointer for the "is playing" function
        guard let playingPtr = dlsym(handle, "MRMediaRemoteGetNowPlayingApplicationIsPlaying") else {
            logger.error("Unable to find MRMediaRemoteGetNowPlayingApplicationIsPlaying function")
            dlclose(handle)
            mediaRemoteHandle = nil
            return
        }

        // unsafeBitCast is required to turn the raw dlsym pointer into a
        // callable @convention(c) function value.
        mrNowPlayingIsPlaying = unsafeBitCast(playingPtr, to: MRNowPlayingIsPlayingFunc.self)

        // Get the send command function pointer (optional: key simulation is
        // used as a fallback when it is unavailable).
        if let sendCommandPtr = dlsym(handle, "MRMediaRemoteSendCommand") {
            mrSendCommand = unsafeBitCast(sendCommandPtr, to: (@convention(c) (Int, [String: Any]?) -> Bool).self)
            logger.info("Successfully loaded MRMediaRemoteSendCommand function")
        } else {
            logger.warning("Could not find MRMediaRemoteSendCommand function, fallback to key simulation")
        }

        logger.info("MediaRemote framework initialized successfully")
    }

    deinit {
        // Release the framework handle obtained in setupMediaRemote().
        if let handle = mediaRemoteHandle {
            dlclose(handle)
        }
    }

    /// Checks if media is currently playing on the system.
    ///
    /// - Returns: `true` only when the feature is enabled, the framework
    ///   loaded, and the system reports an active now-playing app.
    /// - Note(review): if MediaRemote never invokes the callback, the checked
    ///   continuation would never resume and this `await` would hang — observed
    ///   to be reliable in practice, but worth confirming.
    func isMediaPlaying() async -> Bool {
        guard isMediaPauseEnabled, let mrNowPlayingIsPlaying = mrNowPlayingIsPlaying else {
            return false
        }

        // Bridge the C callback API into async/await.
        return await withCheckedContinuation { continuation in
            mrNowPlayingIsPlaying(DispatchQueue.main) { isPlaying in
                continuation.resume(returning: isPlaying)
            }
        }
    }

    /// Pauses media if it's currently playing.
    ///
    /// - Returns: `true` when a pause was attempted (i.e. media was playing),
    ///   `false` when the feature is disabled or nothing was playing.
    /// - Note(review): `didPauseMedia` is set even if both the direct command
    ///   and the key simulation failed, so a later resume may toggle playback
    ///   that never actually paused — consider gating on success.
    func pauseMediaIfPlaying() async -> Bool {
        guard isMediaPauseEnabled else {
            logger.info("Media pause feature is disabled")
            return false
        }

        if await isMediaPlaying() {
            logger.info("Media is playing, pausing it for recording")
            // Event posting touches AppKit, so hop to the main actor.
            await MainActor.run {
                // Try direct command first, then fall back to key simulation
                if !sendMediaCommand(command: kMRPause) {
                    sendMediaKey()
                }
            }
            didPauseMedia = true
            return true
        }

        logger.info("No media playing, no need to pause")
        return false
    }

    /// Resumes media if it was paused by this controller.
    /// No-op when the feature is disabled or we did not pause anything.
    func resumeMediaIfPaused() async {
        guard isMediaPauseEnabled, didPauseMedia else {
            return
        }

        logger.info("Resuming previously paused media")
        await MainActor.run {
            // Try direct command first, then fall back to key simulation
            if !sendMediaCommand(command: kMRPlay) {
                sendMediaKey()
            }
        }
        didPauseMedia = false
    }

    /// Sends a media command using the MediaRemote framework.
    ///
    /// - Parameter command: One of the `kMR*` constants above.
    /// - Returns: The framework's reported result, or `false` when the
    ///   `MRMediaRemoteSendCommand` symbol was not resolved.
    private func sendMediaCommand(command: Int) -> Bool {
        guard let sendCommand = mrSendCommand else {
            logger.warning("MRMediaRemoteSendCommand not available")
            return false
        }

        let result = sendCommand(command, nil)
        logger.info("Sent media command \(command) with result: \(result)")
        return result
    }

    /// Simulates a media key press (Play/Pause) by posting a system-defined NSEvent.
    ///
    /// Builds the `NX_SYSDEFINED` (subtype 8) key-down/key-up pair that the
    /// hardware Play/Pause key produces, posts it at the HID event tap, then
    /// additionally invokes `createAndPostPlayPauseEvent()` as a fallback.
    ///
    /// NOTE(review): the fallback runs unconditionally after the NSEvent path,
    /// so when both succeed the system may receive two play/pause toggles —
    /// confirm intended behavior.
    private func sendMediaKey() {
        // IOKit keytype for the Play/Pause media key.
        let NX_KEYTYPE_PLAY: UInt32 = 16
        let keys = [NX_KEYTYPE_PLAY]

        logger.info("Simulating media key press using NSEvent")

        for key in keys {
            // Posts one half of the press; `down` selects key-down vs key-up.
            func postKeyEvent(down: Bool) {
                // 0xA00 / 0xB00 are the modifier-flag patterns macOS uses for
                // media-key down/up system events (undocumented; presumed from
                // observed hardware events — verify on OS updates).
                let flags: NSEvent.ModifierFlags = down ? .init(rawValue: 0xA00) : .init(rawValue: 0xB00)
                // data1 layout: keytype in the high 16 bits, key state (0xA
                // down / 0xB up) in bits 8-15.
                let data1 = Int((key << 16) | (down ? 0xA << 8 : 0xB << 8))

                if let event = NSEvent.otherEvent(
                    with: .systemDefined,
                    location: .zero,
                    modifierFlags: flags,
                    timestamp: 0,
                    windowNumber: 0,
                    context: nil,
                    subtype: 8,  // NX_SUBTYPE_AUX_CONTROL_BUTTONS
                    data1: data1,
                    data2: -1
                ) {
                    // Attempt to post directly to all applications
                    let didPost = event.cgEvent?.post(tap: .cghidEventTap) != nil
                    logger.info("Posted key event (down: \(down)) with result: \(didPost ? "success" : "failure")")

                    // Add a small delay to ensure the event is processed
                    usleep(10000) // 10ms delay
                }
            }

            // Perform the key down/up sequence
            postKeyEvent(down: true)
            postKeyEvent(down: false)

            // Allow some time for the system to process the key event
            usleep(50000) // 50ms delay
        }

        // As a fallback, try to use CGEvent directly
        createAndPostPlayPauseEvent()
    }

    /// Creates and posts a CGEvent for media control as a fallback method.
    ///
    /// NOTE(review): `CGEvent(keyboardEventSource:virtualKey:keyDown:)` takes a
    /// *keyboard virtual keycode*, and keycode 16 is the letter "Y" on ANSI
    /// layouts — not the IOKit media keytype used above. This fallback likely
    /// types "y" rather than toggling playback; verify before relying on it.
    private func createAndPostPlayPauseEvent() {
        logger.info("Attempting fallback CGEvent for media control")

        // Media keys as defined in IOKit
        let NX_KEYTYPE_PLAY: Int64 = 16

        // Create a CGEvent for the media key
        guard let source = CGEventSource(stateID: .hidSystemState) else {
            logger.error("Failed to create CGEventSource")
            return
        }

        if let keyDownEvent = CGEvent(keyboardEventSource: source, virtualKey: UInt16(NX_KEYTYPE_PLAY), keyDown: true) {
            // Same undocumented media-key flag patterns as in sendMediaKey().
            keyDownEvent.flags = .init(rawValue: 0xA00)
            keyDownEvent.post(tap: .cghidEventTap)
            logger.info("Posted play/pause key down event")

            // Small delay between down and up events
            usleep(10000) // 10ms

            if let keyUpEvent = CGEvent(keyboardEventSource: source, virtualKey: UInt16(NX_KEYTYPE_PLAY), keyDown: false) {
                keyUpEvent.flags = .init(rawValue: 0xB00)
                keyUpEvent.post(tap: .cghidEventTap)
                logger.info("Posted play/pause key up event")
            }
        }
    }
}
|
||||||
|
|
||||||
|
// MARK: - UserDefaults convenience

extension UserDefaults {
    /// Reports whether any value has ever been stored under `key`.
    ///
    /// Unlike `bool(forKey:)`, which returns `false` for a missing key, this
    /// distinguishes "never set" from "set to a falsy value".
    func contains(key: String) -> Bool {
        let stored = object(forKey: key)
        return stored != nil
    }
}
|
||||||
@ -58,7 +58,7 @@ enum PromptTemplates {
|
|||||||
Input: "quick update on the project we're at 60% complete but facing some testing issues that might delay things we're working on solutions"
|
Input: "quick update on the project we're at 60% complete but facing some testing issues that might delay things we're working on solutions"
|
||||||
|
|
||||||
Output: "We're at 60% complete but facing some testing issues that might delay things. We're working on solutions.
|
Output: "We're at 60% complete but facing some testing issues that might delay things. We're working on solutions.
|
||||||
Can you please push the recent changes that we have made to GitHub?
|
|
||||||
I'll keep you updated.
|
I'll keep you updated.
|
||||||
|
|
||||||
Regards,
|
Regards,
|
||||||
|
|||||||
@ -9,6 +9,7 @@ actor Recorder {
|
|||||||
private let deviceManager = AudioDeviceManager.shared
|
private let deviceManager = AudioDeviceManager.shared
|
||||||
private var deviceObserver: NSObjectProtocol?
|
private var deviceObserver: NSObjectProtocol?
|
||||||
private var isReconfiguring = false
|
private var isReconfiguring = false
|
||||||
|
private let mediaController = MediaController.shared
|
||||||
|
|
||||||
enum RecorderError: Error {
|
enum RecorderError: Error {
|
||||||
case couldNotStartRecording
|
case couldNotStartRecording
|
||||||
@ -98,6 +99,13 @@ actor Recorder {
|
|||||||
|
|
||||||
func startRecording(toOutputFile url: URL, delegate: AVAudioRecorderDelegate?) async throws {
|
func startRecording(toOutputFile url: URL, delegate: AVAudioRecorderDelegate?) async throws {
|
||||||
logger.info("Starting recording process")
|
logger.info("Starting recording process")
|
||||||
|
|
||||||
|
// Check if media is playing and pause it if needed
|
||||||
|
let wasPaused = await mediaController.pauseMediaIfPlaying()
|
||||||
|
if wasPaused {
|
||||||
|
logger.info("Media playback paused for recording")
|
||||||
|
}
|
||||||
|
|
||||||
// Get the current selected device
|
// Get the current selected device
|
||||||
let deviceID = deviceManager.getCurrentDevice()
|
let deviceID = deviceManager.getCurrentDevice()
|
||||||
if deviceID != 0 {
|
if deviceID != 0 {
|
||||||
@ -142,12 +150,20 @@ actor Recorder {
|
|||||||
if let deviceName = deviceManager.getDeviceName(deviceID: deviceID) {
|
if let deviceName = deviceManager.getDeviceName(deviceID: deviceID) {
|
||||||
logger.error("Current device name: \(deviceName)")
|
logger.error("Current device name: \(deviceName)")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Resume media if we paused it but failed to start recording
|
||||||
|
await mediaController.resumeMediaIfPaused()
|
||||||
|
|
||||||
throw RecorderError.couldNotStartRecording
|
throw RecorderError.couldNotStartRecording
|
||||||
}
|
}
|
||||||
} catch {
|
} catch {
|
||||||
logger.error("Error creating AVAudioRecorder: \(error.localizedDescription)")
|
logger.error("Error creating AVAudioRecorder: \(error.localizedDescription)")
|
||||||
logger.error("Recording settings used: \(recordSettings)")
|
logger.error("Recording settings used: \(recordSettings)")
|
||||||
logger.error("Output URL: \(url.path)")
|
logger.error("Output URL: \(url.path)")
|
||||||
|
|
||||||
|
// Resume media if we paused it but failed to start recording
|
||||||
|
await mediaController.resumeMediaIfPaused()
|
||||||
|
|
||||||
throw error
|
throw error
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -162,6 +178,11 @@ actor Recorder {
|
|||||||
logger.info("Triggering audio device change notification")
|
logger.info("Triggering audio device change notification")
|
||||||
NotificationCenter.default.post(name: NSNotification.Name("AudioDeviceChanged"), object: nil)
|
NotificationCenter.default.post(name: NSNotification.Name("AudioDeviceChanged"), object: nil)
|
||||||
|
|
||||||
|
// Resume media if we paused it
|
||||||
|
Task {
|
||||||
|
await mediaController.resumeMediaIfPaused()
|
||||||
|
}
|
||||||
|
|
||||||
logger.info("Recording stopped successfully")
|
logger.info("Recording stopped successfully")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
BIN
VoiceInk/Resources/Sounds/pastes.mp3
Executable file → Normal file
BIN
VoiceInk/Resources/Sounds/pastes.mp3
Executable file → Normal file
Binary file not shown.
@ -9,7 +9,7 @@ class SoundManager {
|
|||||||
private var stopSound: AVAudioPlayer?
|
private var stopSound: AVAudioPlayer?
|
||||||
private var escSound: AVAudioPlayer?
|
private var escSound: AVAudioPlayer?
|
||||||
|
|
||||||
@AppStorage("isSoundFeedbackEnabled") private var isSoundFeedbackEnabled = false
|
@AppStorage("isSoundFeedbackEnabled") private var isSoundFeedbackEnabled = true
|
||||||
|
|
||||||
private init() {
|
private init() {
|
||||||
setupSounds()
|
setupSounds()
|
||||||
|
|||||||
@ -97,11 +97,6 @@ struct MiniRecorderView: View {
|
|||||||
.padding(.vertical, 8)
|
.padding(.vertical, 8)
|
||||||
}
|
}
|
||||||
.opacity(windowManager.isVisible ? 1 : 0)
|
.opacity(windowManager.isVisible ? 1 : 0)
|
||||||
.animation(
|
|
||||||
.easeOut(duration: 0.5)
|
|
||||||
.speed(windowManager.isVisible ? 1.0 : 0.8),
|
|
||||||
value: windowManager.isVisible
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -41,15 +41,11 @@ class MiniWindowManager: ObservableObject {
|
|||||||
func hide() {
|
func hide() {
|
||||||
guard isVisible else { return }
|
guard isVisible else { return }
|
||||||
|
|
||||||
withAnimation(.easeOut(duration: 0.5)) {
|
self.isVisible = false
|
||||||
self.isVisible = false
|
|
||||||
}
|
|
||||||
|
|
||||||
DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) { [weak self] in
|
self.miniPanel?.hide { [weak self] in
|
||||||
self?.miniPanel?.hide { [weak self] in
|
guard let self = self else { return }
|
||||||
guard let self = self else { return }
|
self.deinitializeWindow()
|
||||||
self.deinitializeWindow()
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -114,11 +114,6 @@ struct NotchRecorderView: View {
|
|||||||
isHovering = hovering
|
isHovering = hovering
|
||||||
}
|
}
|
||||||
.opacity(windowManager.isVisible ? 1 : 0)
|
.opacity(windowManager.isVisible ? 1 : 0)
|
||||||
.animation(
|
|
||||||
.easeOut(duration: 0.5)
|
|
||||||
.speed(windowManager.isVisible ? 1.0 : 0.8), // Slightly slower when hiding
|
|
||||||
value: windowManager.isVisible
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -238,13 +233,14 @@ struct NotchRecordButton: View {
|
|||||||
.fill(buttonColor)
|
.fill(buttonColor)
|
||||||
.frame(width: 22, height: 22)
|
.frame(width: 22, height: 22)
|
||||||
|
|
||||||
if isRecording {
|
if isProcessing {
|
||||||
|
ProcessingIndicator(color: .white)
|
||||||
|
.frame(width: 14, height: 14)
|
||||||
|
} else {
|
||||||
|
// Show white square for both idle and recording states
|
||||||
RoundedRectangle(cornerRadius: 3)
|
RoundedRectangle(cornerRadius: 3)
|
||||||
.fill(Color.white)
|
.fill(Color.white)
|
||||||
.frame(width: 8, height: 8)
|
.frame(width: 8, height: 8)
|
||||||
} else if isProcessing {
|
|
||||||
ProcessingIndicator(color: .white)
|
|
||||||
.frame(width: 14, height: 14)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -255,10 +251,9 @@ struct NotchRecordButton: View {
|
|||||||
private var buttonColor: Color {
|
private var buttonColor: Color {
|
||||||
if isProcessing {
|
if isProcessing {
|
||||||
return Color(red: 0.4, green: 0.4, blue: 0.45)
|
return Color(red: 0.4, green: 0.4, blue: 0.45)
|
||||||
} else if isRecording {
|
|
||||||
return .red
|
|
||||||
} else {
|
} else {
|
||||||
return Color(red: 0.4, green: 0.4, blue: 0.45)
|
// Use red color for both idle and recording states
|
||||||
|
return .red
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -42,16 +42,13 @@ class NotchWindowManager: ObservableObject {
|
|||||||
func hide() {
|
func hide() {
|
||||||
guard isVisible else { return }
|
guard isVisible else { return }
|
||||||
|
|
||||||
withAnimation(.easeOut(duration: 0.5)) {
|
// Remove animation for instant state change
|
||||||
self.isVisible = false
|
self.isVisible = false
|
||||||
}
|
|
||||||
|
|
||||||
// Wait for animation to complete before cleaning up
|
// Don't wait for animation, clean up immediately
|
||||||
DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) { [weak self] in
|
self.notchPanel?.hide { [weak self] in
|
||||||
self?.notchPanel?.hide { [weak self] in
|
guard let self = self else { return }
|
||||||
guard let self = self else { return }
|
self.deinitializeWindow()
|
||||||
self.deinitializeWindow()
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -6,6 +6,7 @@ struct RecordView: View {
|
|||||||
@EnvironmentObject var whisperState: WhisperState
|
@EnvironmentObject var whisperState: WhisperState
|
||||||
@EnvironmentObject var hotkeyManager: HotkeyManager
|
@EnvironmentObject var hotkeyManager: HotkeyManager
|
||||||
@Environment(\.colorScheme) private var colorScheme
|
@Environment(\.colorScheme) private var colorScheme
|
||||||
|
@ObservedObject private var mediaController = MediaController.shared
|
||||||
|
|
||||||
private var hasShortcutSet: Bool {
|
private var hasShortcutSet: Bool {
|
||||||
KeyboardShortcuts.getShortcut(for: .toggleMiniRecorder) != nil
|
KeyboardShortcuts.getShortcut(for: .toggleMiniRecorder) != nil
|
||||||
@ -111,11 +112,21 @@ struct RecordView: View {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
.toggleStyle(.switch)
|
.toggleStyle(.switch)
|
||||||
|
|
||||||
|
Toggle(isOn: $mediaController.isMediaPauseEnabled) {
|
||||||
|
HStack {
|
||||||
|
Image(systemName: "play.slash")
|
||||||
|
.foregroundColor(.secondary)
|
||||||
|
Text("Pause media during recording")
|
||||||
|
.font(.subheadline.weight(.medium))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
.toggleStyle(.switch)
|
||||||
|
.help("Automatically pause music playback when recording starts and resume when recording stops")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
.padding(24)
|
.padding(24)
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private var shortcutSection: some View {
|
private var shortcutSection: some View {
|
||||||
|
|||||||
@ -270,9 +270,14 @@ class WhisperState: NSObject, ObservableObject, AVAudioRecorderDelegate {
|
|||||||
.appending(path: "output.wav")
|
.appending(path: "output.wav")
|
||||||
self.logger.info("Created output file at: \(file.path)")
|
self.logger.info("Created output file at: \(file.path)")
|
||||||
|
|
||||||
// Start recording immediately
|
// Only start the audio engine if it's not already running
|
||||||
self.logger.info("Starting audio engine")
|
// (it might have been started in parallel by handleToggleMiniRecorder)
|
||||||
self.audioEngine.startAudioEngine()
|
if !self.audioEngine.isRunning {
|
||||||
|
self.logger.info("Starting audio engine")
|
||||||
|
self.audioEngine.startAudioEngine()
|
||||||
|
} else {
|
||||||
|
self.logger.info("Audio engine already running")
|
||||||
|
}
|
||||||
|
|
||||||
self.logger.info("Initializing recorder")
|
self.logger.info("Initializing recorder")
|
||||||
try await self.recorder.startRecording(toOutputFile: file, delegate: self)
|
try await self.recorder.startRecording(toOutputFile: file, delegate: self)
|
||||||
@ -656,10 +661,21 @@ class WhisperState: NSObject, ObservableObject, AVAudioRecorderDelegate {
|
|||||||
await toggleRecord()
|
await toggleRecord()
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
// If the recorder is not visible, show it and start recording
|
// Start a parallel task for both UI and recording
|
||||||
showRecorderPanel()
|
|
||||||
isMiniRecorderVisible = true
|
|
||||||
Task {
|
Task {
|
||||||
|
// Play start sound first
|
||||||
|
SoundManager.shared.playStartSound()
|
||||||
|
|
||||||
|
// Start audio engine immediately - this can happen in parallel
|
||||||
|
audioEngine.startAudioEngine()
|
||||||
|
|
||||||
|
// Show UI (this is quick now that we removed animations)
|
||||||
|
await MainActor.run {
|
||||||
|
showRecorderPanel() // Modified version that doesn't start audio engine
|
||||||
|
isMiniRecorderVisible = true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start recording (this will happen in parallel with UI showing)
|
||||||
await toggleRecord()
|
await toggleRecord()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -680,8 +696,8 @@ class WhisperState: NSObject, ObservableObject, AVAudioRecorderDelegate {
|
|||||||
}
|
}
|
||||||
miniWindowManager?.show()
|
miniWindowManager?.show()
|
||||||
}
|
}
|
||||||
audioEngine.startAudioEngine()
|
// Audio engine is now started separately in handleToggleMiniRecorder
|
||||||
SoundManager.shared.playStartSound()
|
// SoundManager.shared.playStartSound() - Moved to handleToggleMiniRecorder
|
||||||
logger.info("Recorder panel shown successfully")
|
logger.info("Recorder panel shown successfully")
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -702,9 +718,23 @@ class WhisperState: NSObject, ObservableObject, AVAudioRecorderDelegate {
|
|||||||
if isMiniRecorderVisible {
|
if isMiniRecorderVisible {
|
||||||
await dismissMiniRecorder()
|
await dismissMiniRecorder()
|
||||||
} else {
|
} else {
|
||||||
showRecorderPanel()
|
// Start a parallel task for both UI and recording
|
||||||
isMiniRecorderVisible = true
|
Task {
|
||||||
await toggleRecord()
|
// Play start sound first
|
||||||
|
SoundManager.shared.playStartSound()
|
||||||
|
|
||||||
|
// Start audio engine immediately - this can happen in parallel
|
||||||
|
audioEngine.startAudioEngine()
|
||||||
|
|
||||||
|
// Show UI (this is quick now that we removed animations)
|
||||||
|
await MainActor.run {
|
||||||
|
showRecorderPanel() // Modified version that doesn't start audio engine
|
||||||
|
isMiniRecorderVisible = true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start recording
|
||||||
|
await toggleRecord()
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -737,10 +767,10 @@ class WhisperState: NSObject, ObservableObject, AVAudioRecorderDelegate {
|
|||||||
miniWindowManager?.hide()
|
miniWindowManager?.hide()
|
||||||
}
|
}
|
||||||
|
|
||||||
// 3. Wait for animation to complete
|
// 3. No need to wait for animation since we removed it
|
||||||
try? await Task.sleep(nanoseconds: 700_000_000) // 0.7 seconds
|
// try? await Task.sleep(nanoseconds: 700_000_000) // 0.7 seconds
|
||||||
|
|
||||||
// 4. Only after animation, clean up all states
|
// 4. Clean up states immediately
|
||||||
await MainActor.run {
|
await MainActor.run {
|
||||||
logger.info("Cleaning up recorder states")
|
logger.info("Cleaning up recorder states")
|
||||||
// Reset all states
|
// Reset all states
|
||||||
|
|||||||
Loading…
x
Reference in New Issue
Block a user