Merge pull request #438 from Beingpax/AV-AudioEngine-Setup
Avaudioengine setup
This commit is contained in:
commit
80d4dff640
348
VoiceInk/AudioEngineRecorder.swift
Normal file
348
VoiceInk/AudioEngineRecorder.swift
Normal file
@ -0,0 +1,348 @@
|
||||
import Foundation
|
||||
@preconcurrency import AVFoundation
|
||||
import CoreAudio
|
||||
import os
|
||||
|
||||
@MainActor
class AudioEngineRecorder: ObservableObject {
    private let logger = Logger(subsystem: "com.prakashjoshipax.voiceink", category: "AudioEngineRecorder")

    private var audioEngine: AVAudioEngine?
    private var inputNode: AVAudioInputNode?

    // Written on the main actor and read from `audioProcessingQueue`; every
    // access is guarded by `fileWriteLock`, hence `nonisolated(unsafe)`.
    nonisolated(unsafe) private var audioFile: AVAudioFile?
    nonisolated(unsafe) private var recordingFormat: AVAudioFormat?
    nonisolated(unsafe) private var converter: AVAudioConverter?

    private var isRecording = false
    private var recordingURL: URL?

    /// Latest measured input levels in dB (log10-scaled), published for UI metering.
    @Published var currentAveragePower: Float = 0.0
    @Published var currentPeakPower: Float = 0.0

    private let tapBufferSize: AVAudioFrameCount = 4096
    private let tapBusNumber: AVAudioNodeBus = 0

    // Serial queue that drains tap buffers off the realtime audio thread.
    private let audioProcessingQueue = DispatchQueue(label: "com.prakashjoshipax.VoiceInk.audioProcessing", qos: .userInitiated)
    private let fileWriteLock = NSLock()

    // Callback to notify parent class of runtime recording errors
    var onRecordingError: ((Error) -> Void)?

    init() {
        setupNotifications()
    }

    /// Observes engine configuration changes (e.g. the input device or its
    /// sample rate changing mid-recording) so recording can be restarted.
    private func setupNotifications() {
        NotificationCenter.default.addObserver(
            self,
            selector: #selector(handleConfigurationChange),
            name: .AVAudioEngineConfigurationChange,
            object: nil
        )
    }

    @objc private func handleConfigurationChange(notification: Notification) {
        Task { @MainActor in
            guard isRecording else { return }
            logger.info("⚠️ AVAudioEngine configuration change detected (e.g. sample rate change). Restarting engine...")
            do {
                try restartRecordingPreservingFile()
            } catch {
                logger.error("Failed to recover from configuration change: \(error.localizedDescription)")
                stopRecording()
            }
        }
    }

    /// Starts a new recording, converting the device input to 16 kHz / mono /
    /// 16-bit PCM and writing it to `url`. Any recording already in progress
    /// is stopped first, and an existing file at `url` is replaced.
    /// - Parameter url: Destination file URL for the recording.
    /// - Throws: `AudioEngineRecorderError` when the input format is invalid,
    ///   or the file, converter, or engine cannot be set up.
    func startRecording(toOutputFile url: URL) throws {
        stopRecording()

        let engine = AVAudioEngine()
        audioEngine = engine

        let input = engine.inputNode
        inputNode = input

        let inputFormat = input.outputFormat(forBus: tapBusNumber)

        guard inputFormat.sampleRate > 0, inputFormat.channelCount > 0 else {
            logger.error("Invalid input format: sample rate or channel count is zero")
            throw AudioEngineRecorderError.invalidInputFormat
        }

        // Target on-disk format: 16 kHz, mono, 16-bit integer PCM.
        guard let desiredFormat = AVAudioFormat(
            commonFormat: .pcmFormatInt16,
            sampleRate: 16000.0,
            channels: 1,
            interleaved: false
        ) else {
            logger.error("Failed to create desired recording format")
            throw AudioEngineRecorderError.invalidRecordingFormat
        }

        recordingURL = url

        let createdAudioFile: AVAudioFile
        do {
            // Replace any stale file from a previous session at the same path.
            if FileManager.default.fileExists(atPath: url.path) {
                try FileManager.default.removeItem(at: url)
            }

            createdAudioFile = try AVAudioFile(
                forWriting: url,
                settings: desiredFormat.settings,
                commonFormat: desiredFormat.commonFormat,
                interleaved: desiredFormat.isInterleaved
            )
        } catch {
            logger.error("Failed to create audio file: \(error.localizedDescription)")
            throw AudioEngineRecorderError.failedToCreateFile(error)
        }

        guard let audioConverter = AVAudioConverter(from: inputFormat, to: desiredFormat) else {
            logger.error("Failed to create audio format converter")
            throw AudioEngineRecorderError.failedToCreateConverter
        }

        // Publish the write-path state under the lock before the tap can fire.
        fileWriteLock.lock()
        recordingFormat = desiredFormat
        audioFile = createdAudioFile
        converter = audioConverter
        fileWriteLock.unlock()

        input.installTap(onBus: tapBusNumber, bufferSize: tapBufferSize, format: inputFormat) { [weak self] (buffer, time) in
            guard let self = self else { return }

            // Hop off the realtime audio thread before doing any heavy work.
            self.audioProcessingQueue.async {
                self.processAudioBuffer(buffer)
            }
        }

        engine.prepare()

        do {
            try engine.start()
            isRecording = true
        } catch {
            logger.error("Failed to start audio engine: \(error.localizedDescription)")
            input.removeTap(onBus: tapBusNumber)
            // Roll back the partially-configured state; `stopRecording()` would
            // skip this cleanup because `isRecording` was never set.
            fileWriteLock.lock()
            audioFile = nil
            converter = nil
            recordingFormat = nil
            fileWriteLock.unlock()
            audioEngine = nil
            inputNode = nil
            recordingURL = nil
            throw AudioEngineRecorderError.failedToStartEngine(error)
        }
    }

    /// Tears down and rebuilds the engine after a configuration change while
    /// continuing to append to the already-open audio file.
    /// - Throws: `AudioEngineRecorderError` when the new input format is
    ///   invalid or a converter/engine cannot be created.
    private func restartRecordingPreservingFile() throws {
        if let input = inputNode {
            input.removeTap(onBus: tapBusNumber)
        }
        audioEngine?.stop()

        // Drain queue to prevent old-format buffers racing with new converter
        audioProcessingQueue.sync { }

        let engine = AVAudioEngine()
        audioEngine = engine

        let input = engine.inputNode
        inputNode = input

        let inputFormat = input.outputFormat(forBus: tapBusNumber)
        logger.info("Restarting with new input format - Sample Rate: \(inputFormat.sampleRate)")

        guard inputFormat.sampleRate > 0 else {
            throw AudioEngineRecorderError.invalidInputFormat
        }

        guard let format = recordingFormat else {
            throw AudioEngineRecorderError.invalidRecordingFormat
        }

        guard let newConverter = AVAudioConverter(from: inputFormat, to: format) else {
            throw AudioEngineRecorderError.failedToCreateConverter
        }

        fileWriteLock.lock()
        converter = newConverter
        fileWriteLock.unlock()

        input.installTap(onBus: tapBusNumber, bufferSize: tapBufferSize, format: inputFormat) { [weak self] (buffer, time) in
            guard let self = self else { return }
            self.audioProcessingQueue.async {
                self.processAudioBuffer(buffer)
            }
        }

        engine.prepare()
        try engine.start()
        logger.info("✅ Audio engine successfully restarted after configuration change")
    }

    /// Stops the current recording, flushes pending buffers, closes the file,
    /// and resets all published meter values. Safe no-op when not recording.
    func stopRecording() {
        guard isRecording else {
            return
        }

        if let input = inputNode {
            input.removeTap(onBus: tapBusNumber)
        }

        audioEngine?.stop()

        // Wait for pending buffers to finish processing before clearing resources
        audioProcessingQueue.sync { }

        fileWriteLock.lock()
        audioFile = nil
        converter = nil
        recordingFormat = nil
        fileWriteLock.unlock()

        audioEngine = nil
        inputNode = nil
        recordingURL = nil
        isRecording = false

        currentAveragePower = 0.0
        currentPeakPower = 0.0
    }

    /// Runs on `audioProcessingQueue`: meters the raw buffer, then converts
    /// and appends it to the output file.
    nonisolated private func processAudioBuffer(_ buffer: AVAudioPCMBuffer) {
        updateMeters(from: buffer)
        writeBufferToFile(buffer)
    }

    /// Converts one tap buffer to the recording format and writes it to disk.
    /// Errors are reported to `onRecordingError` on the main actor rather
    /// than thrown, since this runs on the processing queue.
    nonisolated private func writeBufferToFile(_ buffer: AVAudioPCMBuffer) {
        fileWriteLock.lock()
        defer { fileWriteLock.unlock() }

        // Recording may have been stopped while this buffer was in flight.
        guard let audioFile = audioFile,
              let converter = converter,
              let format = recordingFormat else {
            return
        }

        let inputSampleRate = buffer.format.sampleRate
        let outputSampleRate = format.sampleRate
        let ratio = outputSampleRate / inputSampleRate
        // Round up, and never allow zero, so short buffers or non-integer rate
        // ratios cannot produce an undersized or zero-capacity output buffer
        // (AVAudioPCMBuffer init fails for frameCapacity == 0).
        let outputCapacity = AVAudioFrameCount(max(1.0, (Double(buffer.frameLength) * ratio).rounded(.up)))

        guard let convertedBuffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: outputCapacity) else {
            logger.error("Failed to create converted buffer")
            Task { @MainActor in
                self.onRecordingError?(AudioEngineRecorderError.bufferConversionFailed)
            }
            return
        }

        var error: NSError?
        var hasProvidedBuffer = false

        // Feed exactly one input buffer per convert() call; report .noDataNow
        // on subsequent pulls so the converter returns what it has.
        converter.convert(to: convertedBuffer, error: &error) { inNumPackets, outStatus in
            if hasProvidedBuffer {
                outStatus.pointee = .noDataNow
                return nil
            } else {
                hasProvidedBuffer = true
                outStatus.pointee = .haveData
                return buffer
            }
        }

        if let error = error {
            logger.error("Audio conversion error: \(error.localizedDescription)")
            Task { @MainActor in
                self.onRecordingError?(AudioEngineRecorderError.audioConversionError(error))
            }
            return
        }

        do {
            try audioFile.write(from: convertedBuffer)
        } catch {
            logger.error("Failed to write buffer to file: \(error.localizedDescription)")
            Task { @MainActor in
                self.onRecordingError?(AudioEngineRecorderError.fileWriteFailed(error))
            }
        }
    }

    /// Computes RMS (average) and peak levels in dB from the first channel of
    /// the raw buffer and publishes them on the main actor.
    nonisolated private func updateMeters(from buffer: AVAudioPCMBuffer) {
        guard let channelData = buffer.floatChannelData else { return }

        let channelCount = Int(buffer.format.channelCount)
        let frameLength = Int(buffer.frameLength)

        guard channelCount > 0, frameLength > 0 else { return }

        let channel = channelData[0]
        var sum: Float = 0.0
        var peak: Float = 0.0

        for frame in 0..<frameLength {
            let sample = channel[frame]
            let absSample = abs(sample)

            if absSample > peak {
                peak = absSample
            }

            sum += sample * sample
        }

        let rms = sqrt(sum / Float(frameLength))

        // Floor at 1e-6 to avoid log10(0) == -inf on silent buffers.
        let averagePowerDb = 20.0 * log10(max(rms, 0.000001))
        let peakPowerDb = 20.0 * log10(max(peak, 0.000001))

        Task { @MainActor in
            self.currentAveragePower = averagePowerDb
            self.currentPeakPower = peakPowerDb
        }
    }

    /// True while a recording session is active.
    var isCurrentlyRecording: Bool {
        return isRecording
    }

    /// Destination URL of the active recording, or nil when not recording.
    var currentRecordingURL: URL? {
        return recordingURL
    }

    deinit {
        NotificationCenter.default.removeObserver(self)
    }
}
|
||||
|
||||
// MARK: - Error Types
|
||||
|
||||
/// Failures that `AudioEngineRecorder` can raise during setup or while a
/// recording session is running. Conforms to `LocalizedError` so callers can
/// surface `localizedDescription` directly to the user.
enum AudioEngineRecorderError: LocalizedError {
    /// The device reported a zero sample rate or channel count.
    case invalidInputFormat
    /// The 16 kHz mono PCM target format could not be constructed.
    case invalidRecordingFormat
    /// Creating the output `AVAudioFile` failed; wraps the underlying error.
    case failedToCreateFile(Error)
    /// No converter exists between the input and recording formats.
    case failedToCreateConverter
    /// `AVAudioEngine.start()` threw; wraps the underlying error.
    case failedToStartEngine(Error)
    /// A PCM buffer for converted output could not be allocated.
    case bufferConversionFailed
    /// The format conversion itself reported an error.
    case audioConversionError(Error)
    /// Appending converted audio to the file failed.
    case fileWriteFailed(Error)

    /// Human-readable description for each failure case.
    var errorDescription: String? {
        switch self {
        case .invalidInputFormat: return "Invalid audio input format from device"
        case .invalidRecordingFormat: return "Failed to create recording format"
        case .failedToCreateFile(let error): return "Failed to create audio file: \(error.localizedDescription)"
        case .failedToCreateConverter: return "Failed to create audio format converter"
        case .failedToStartEngine(let error): return "Failed to start audio engine: \(error.localizedDescription)"
        case .bufferConversionFailed: return "Failed to create buffer for audio conversion"
        case .audioConversionError(let error): return "Audio format conversion failed: \(error.localizedDescription)"
        case .fileWriteFailed(let error): return "Failed to write audio data to file: \(error.localizedDescription)"
        }
    }
}
|
||||
@ -4,8 +4,8 @@ import CoreAudio
|
||||
import os
|
||||
|
||||
@MainActor
|
||||
class Recorder: NSObject, ObservableObject, AVAudioRecorderDelegate {
|
||||
private var recorder: AVAudioRecorder?
|
||||
class Recorder: NSObject, ObservableObject {
|
||||
private var recorder: AudioEngineRecorder?
|
||||
private let logger = Logger(subsystem: "com.prakashjoshipax.voiceink", category: "Recorder")
|
||||
private let deviceManager = AudioDeviceManager.shared
|
||||
private var deviceObserver: NSObjectProtocol?
|
||||
@ -39,7 +39,7 @@ class Recorder: NSObject, ObservableObject, AVAudioRecorderDelegate {
|
||||
isReconfiguring = true
|
||||
|
||||
if recorder != nil {
|
||||
let currentURL = recorder?.url
|
||||
let currentURL = recorder?.currentRecordingURL
|
||||
stopRecording()
|
||||
|
||||
if let url = currentURL {
|
||||
@ -86,26 +86,21 @@ class Recorder: NSObject, ObservableObject, AVAudioRecorderDelegate {
|
||||
}
|
||||
}
|
||||
|
||||
let recordSettings: [String: Any] = [
|
||||
AVFormatIDKey: Int(kAudioFormatLinearPCM),
|
||||
AVSampleRateKey: 16000.0,
|
||||
AVNumberOfChannelsKey: 1,
|
||||
AVLinearPCMBitDepthKey: 16,
|
||||
AVLinearPCMIsFloatKey: false,
|
||||
AVLinearPCMIsBigEndianKey: false,
|
||||
AVLinearPCMIsNonInterleaved: false
|
||||
]
|
||||
|
||||
do {
|
||||
recorder = try AVAudioRecorder(url: url, settings: recordSettings)
|
||||
recorder?.delegate = self
|
||||
recorder?.isMeteringEnabled = true
|
||||
let engineRecorder = AudioEngineRecorder()
|
||||
recorder = engineRecorder
|
||||
|
||||
if recorder?.record() == false {
|
||||
logger.error("❌ Could not start recording")
|
||||
throw RecorderError.couldNotStartRecording
|
||||
// Set up error callback to handle runtime recording failures
|
||||
engineRecorder.onRecordingError = { [weak self] error in
|
||||
Task { @MainActor in
|
||||
await self?.handleRecordingError(error)
|
||||
}
|
||||
}
|
||||
|
||||
try engineRecorder.startRecording(toOutputFile: url)
|
||||
|
||||
logger.info("✅ AudioEngineRecorder started successfully")
|
||||
|
||||
Task { [weak self] in
|
||||
guard let self = self else { return }
|
||||
await self.playbackController.pauseMedia()
|
||||
@ -118,7 +113,7 @@ class Recorder: NSObject, ObservableObject, AVAudioRecorderDelegate {
|
||||
audioMeterUpdateTask = Task {
|
||||
while recorder != nil && !Task.isCancelled {
|
||||
updateAudioMeter()
|
||||
try? await Task.sleep(nanoseconds: 33_000_000)
|
||||
try? await Task.sleep(nanoseconds: 17_000_000)
|
||||
}
|
||||
}
|
||||
|
||||
@ -153,7 +148,7 @@ class Recorder: NSObject, ObservableObject, AVAudioRecorderDelegate {
|
||||
func stopRecording() {
|
||||
audioLevelCheckTask?.cancel()
|
||||
audioMeterUpdateTask?.cancel()
|
||||
recorder?.stop()
|
||||
recorder?.stopRecording()
|
||||
recorder = nil
|
||||
audioMeter = AudioMeter(averagePower: 0, peakPower: 0)
|
||||
|
||||
@ -165,12 +160,26 @@ class Recorder: NSObject, ObservableObject, AVAudioRecorderDelegate {
|
||||
deviceManager.isRecordingActive = false
|
||||
}
|
||||
|
||||
/// Handles a runtime failure reported by the engine recorder: logs it, tears
/// down the recording session, and shows an error notification to the user.
private func handleRecordingError(_ error: Error) async {
    let reason = error.localizedDescription
    logger.error("❌ Recording error occurred: \(reason)")

    // Stop the recording
    stopRecording()

    // Notify the user about the recording failure
    await MainActor.run {
        NotificationManager.shared.showNotification(
            title: "Recording Failed: \(reason)",
            type: .error
        )
    }
}
|
||||
|
||||
private func updateAudioMeter() {
|
||||
guard let recorder = recorder else { return }
|
||||
recorder.updateMeters()
|
||||
|
||||
let averagePower = recorder.averagePower(forChannel: 0)
|
||||
let peakPower = recorder.peakPower(forChannel: 0)
|
||||
let averagePower = recorder.currentAveragePower
|
||||
let peakPower = recorder.currentPeakPower
|
||||
|
||||
let minVisibleDb: Float = -60.0
|
||||
let maxVisibleDb: Float = 0.0
|
||||
@ -202,31 +211,7 @@ class Recorder: NSObject, ObservableObject, AVAudioRecorderDelegate {
|
||||
audioMeter = newAudioMeter
|
||||
}
|
||||
|
||||
// MARK: - AVAudioRecorderDelegate
|
||||
|
||||
nonisolated func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
|
||||
if !flag {
|
||||
logger.error("❌ Recording finished unsuccessfully - file may be corrupted or empty")
|
||||
Task { @MainActor in
|
||||
NotificationManager.shared.showNotification(
|
||||
title: "Recording failed - audio file corrupted",
|
||||
type: .error
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
nonisolated func audioRecorderEncodeErrorDidOccur(_ recorder: AVAudioRecorder, error: Error?) {
|
||||
if let error = error {
|
||||
logger.error("❌ Recording encode error during session: \(error.localizedDescription)")
|
||||
Task { @MainActor in
|
||||
NotificationManager.shared.showNotification(
|
||||
title: "Recording error: \(error.localizedDescription)",
|
||||
type: .error
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
// MARK: - Cleanup
|
||||
|
||||
deinit {
|
||||
audioLevelCheckTask?.cancel()
|
||||
|
||||
@ -10,7 +10,6 @@ struct PrioritizedDevice: Codable, Identifiable {
|
||||
}
|
||||
|
||||
enum AudioInputMode: String, CaseIterable {
|
||||
case systemDefault = "System Default"
|
||||
case custom = "Custom Device"
|
||||
case prioritized = "Prioritized"
|
||||
}
|
||||
@ -19,7 +18,7 @@ class AudioDeviceManager: ObservableObject {
|
||||
private let logger = Logger(subsystem: "com.prakashjoshipax.voiceink", category: "AudioDeviceManager")
|
||||
@Published var availableDevices: [(id: AudioDeviceID, uid: String, name: String)] = []
|
||||
@Published var selectedDeviceID: AudioDeviceID?
|
||||
@Published var inputMode: AudioInputMode = .systemDefault
|
||||
@Published var inputMode: AudioInputMode = .custom
|
||||
@Published var prioritizedDevices: [PrioritizedDevice] = []
|
||||
var fallbackDeviceID: AudioDeviceID?
|
||||
|
||||
@ -30,18 +29,39 @@ class AudioDeviceManager: ObservableObject {
|
||||
init() {
|
||||
setupFallbackDevice()
|
||||
loadPrioritizedDevices()
|
||||
loadAvailableDevices { [weak self] in
|
||||
self?.initializeSelectedDevice()
|
||||
}
|
||||
|
||||
if let savedMode = UserDefaults.standard.audioInputModeRawValue,
|
||||
let mode = AudioInputMode(rawValue: savedMode) {
|
||||
inputMode = mode
|
||||
} else {
|
||||
inputMode = .custom
|
||||
}
|
||||
|
||||
loadAvailableDevices { [weak self] in
|
||||
self?.migrateFromSystemDefaultIfNeeded()
|
||||
self?.initializeSelectedDevice()
|
||||
}
|
||||
|
||||
setupDeviceChangeNotifications()
|
||||
}
|
||||
|
||||
/// One-time migration: users whose saved input mode is the removed
/// "System Default" value are moved to Custom mode, selecting the current
/// fallback device when one is known.
private func migrateFromSystemDefaultIfNeeded() {
    guard UserDefaults.standard.audioInputModeRawValue == "System Default" else { return }

    logger.info("Migrating from System Default mode to Custom mode")

    if let fallbackID = fallbackDeviceID {
        selectedDeviceID = fallbackID
        // Persist the device UID only when the fallback is still present in
        // the enumerated device list.
        if let migrated = availableDevices.first(where: { $0.id == fallbackID }) {
            UserDefaults.standard.selectedAudioDeviceUID = migrated.uid
            logger.info("Migrated to Custom mode with device: \(migrated.name)")
        }
    }

    UserDefaults.standard.audioInputModeRawValue = AudioInputMode.custom.rawValue
}
|
||||
|
||||
func setupFallbackDevice() {
|
||||
let deviceID: AudioDeviceID? = getDeviceProperty(
|
||||
deviceID: AudioObjectID(kAudioObjectSystemObject),
|
||||
@ -86,13 +106,39 @@ class AudioDeviceManager: ObservableObject {
|
||||
}
|
||||
|
||||
private func fallbackToDefaultDevice() {
|
||||
logger.info("Temporarily falling back to system default input device – user preference remains intact.")
|
||||
logger.info("Current device unavailable, selecting new device...")
|
||||
|
||||
if let currentID = selectedDeviceID, !isDeviceAvailable(currentID) {
|
||||
guard let newDeviceID = findBestAvailableDevice() else {
|
||||
logger.error("No input devices available!")
|
||||
selectedDeviceID = nil
|
||||
notifyDeviceChange()
|
||||
return
|
||||
}
|
||||
|
||||
notifyDeviceChange()
|
||||
let newDeviceName = getDeviceName(deviceID: newDeviceID) ?? "Unknown Device"
|
||||
logger.info("Auto-selecting new device: \(newDeviceName)")
|
||||
selectDevice(id: newDeviceID)
|
||||
}
|
||||
|
||||
/// Picks the best input device to auto-select: the built-in device when one
/// is present, otherwise the first enumerated device, otherwise nil.
func findBestAvailableDevice() -> AudioDeviceID? {
    if let builtIn = availableDevices.first(where: { isBuiltInDevice($0.id) }) {
        logger.info("Found built-in device: \(builtIn.name)")
        return builtIn.id
    }

    guard let fallback = availableDevices.first else { return nil }
    logger.warning("No built-in device found, using first available: \(fallback.name)")
    return fallback.id
}
|
||||
|
||||
/// Heuristic for built-in hardware: true when the device UID contains
/// "BuiltIn" (presumably matching macOS's built-in device UID naming —
/// TODO confirm against real device UIDs). Returns false when no UID
/// can be read.
private func isBuiltInDevice(_ deviceID: AudioDeviceID) -> Bool {
    getDeviceUID(deviceID: deviceID)?.contains("BuiltIn") ?? false
}
|
||||
|
||||
func loadAvailableDevices(completion: (() -> Void)? = nil) {
|
||||
@ -216,6 +262,14 @@ class AudioDeviceManager: ObservableObject {
|
||||
self.selectedDeviceID = id
|
||||
UserDefaults.standard.selectedAudioDeviceUID = uid
|
||||
self.logger.info("Device selection saved with UID: \(uid)")
|
||||
|
||||
do {
|
||||
try AudioDeviceConfiguration.setDefaultInputDevice(id)
|
||||
self.logger.info("✅ Set device as system default immediately")
|
||||
} catch {
|
||||
self.logger.error("Failed to set device as system default: \(error.localizedDescription)")
|
||||
}
|
||||
|
||||
self.notifyDeviceChange()
|
||||
}
|
||||
} else {
|
||||
@ -232,6 +286,14 @@ class AudioDeviceManager: ObservableObject {
|
||||
self.selectedDeviceID = id
|
||||
UserDefaults.standard.audioInputModeRawValue = AudioInputMode.custom.rawValue
|
||||
UserDefaults.standard.selectedAudioDeviceUID = uid
|
||||
|
||||
do {
|
||||
try AudioDeviceConfiguration.setDefaultInputDevice(id)
|
||||
self.logger.info("✅ Set device as system default immediately")
|
||||
} catch {
|
||||
self.logger.error("Failed to set device as system default: \(error.localizedDescription)")
|
||||
}
|
||||
|
||||
self.notifyDeviceChange()
|
||||
}
|
||||
} else {
|
||||
@ -244,10 +306,7 @@ class AudioDeviceManager: ObservableObject {
|
||||
inputMode = mode
|
||||
UserDefaults.standard.audioInputModeRawValue = mode.rawValue
|
||||
|
||||
if mode == .systemDefault {
|
||||
selectedDeviceID = nil
|
||||
UserDefaults.standard.removeObject(forKey: UserDefaults.Keys.selectedAudioDeviceUID)
|
||||
} else if selectedDeviceID == nil {
|
||||
if selectedDeviceID == nil {
|
||||
if inputMode == .custom {
|
||||
if let firstDevice = availableDevices.first {
|
||||
selectDevice(id: firstDevice.id)
|
||||
@ -255,6 +314,15 @@ class AudioDeviceManager: ObservableObject {
|
||||
} else if inputMode == .prioritized {
|
||||
selectHighestPriorityAvailableDevice()
|
||||
}
|
||||
} else {
|
||||
if let currentDeviceID = selectedDeviceID {
|
||||
do {
|
||||
try AudioDeviceConfiguration.setDefaultInputDevice(currentDeviceID)
|
||||
logger.info("✅ Set current device as system default when mode changed")
|
||||
} catch {
|
||||
logger.error("Failed to set device as system default: \(error.localizedDescription)")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
notifyDeviceChange()
|
||||
@ -262,13 +330,12 @@ class AudioDeviceManager: ObservableObject {
|
||||
|
||||
func getCurrentDevice() -> AudioDeviceID {
|
||||
switch inputMode {
|
||||
case .systemDefault:
|
||||
return fallbackDeviceID ?? 0
|
||||
case .custom:
|
||||
if let id = selectedDeviceID, isDeviceAvailable(id) {
|
||||
return id
|
||||
} else {
|
||||
return fallbackDeviceID ?? 0
|
||||
// Use smart device finding instead of stale fallback
|
||||
return findBestAvailableDevice() ?? 0
|
||||
}
|
||||
case .prioritized:
|
||||
let sortedDevices = prioritizedDevices.sorted { $0.priority < $1.priority }
|
||||
@ -277,7 +344,8 @@ class AudioDeviceManager: ObservableObject {
|
||||
return available.id
|
||||
}
|
||||
}
|
||||
return fallbackDeviceID ?? 0
|
||||
// Use smart device finding instead of stale fallback
|
||||
return findBestAvailableDevice() ?? 0
|
||||
}
|
||||
}
|
||||
|
||||
@ -341,6 +409,7 @@ class AudioDeviceManager: ObservableObject {
|
||||
|
||||
do {
|
||||
try AudioDeviceConfiguration.setDefaultInputDevice(availableDevice.id)
|
||||
logger.info("✅ Set prioritized device as system default immediately")
|
||||
} catch {
|
||||
logger.error("Failed to set prioritized device: \(error.localizedDescription)")
|
||||
continue
|
||||
|
||||
@ -175,15 +175,9 @@ struct OnboardingPermissionsView: View {
|
||||
}
|
||||
)
|
||||
.onAppear {
|
||||
// Auto-select built-in microphone if no device is selected
|
||||
if audioDeviceManager.selectedDeviceID == nil && !audioDeviceManager.availableDevices.isEmpty {
|
||||
let builtInDevice = audioDeviceManager.availableDevices.first { device in
|
||||
device.name.lowercased().contains("built-in") ||
|
||||
device.name.lowercased().contains("internal")
|
||||
}
|
||||
let deviceToSelect = builtInDevice ?? audioDeviceManager.availableDevices.first
|
||||
if let device = deviceToSelect {
|
||||
audioDeviceManager.selectDevice(id: device.id)
|
||||
if !audioDeviceManager.availableDevices.isEmpty {
|
||||
if let deviceID = audioDeviceManager.findBestAvailableDevice() {
|
||||
audioDeviceManager.selectDevice(id: deviceID)
|
||||
audioDeviceManager.selectInputMode(.custom)
|
||||
withAnimation {
|
||||
permissionStates[currentPermissionIndex] = true
|
||||
@ -278,8 +272,8 @@ struct OnboardingPermissionsView: View {
|
||||
// Check microphone permission
|
||||
permissionStates[0] = AVCaptureDevice.authorizationStatus(for: .audio) == .authorized
|
||||
|
||||
// Check if device is selected or system default mode is being used
|
||||
permissionStates[1] = audioDeviceManager.selectedDeviceID != nil || audioDeviceManager.inputMode == .systemDefault
|
||||
// Check if device is selected
|
||||
permissionStates[1] = audioDeviceManager.selectedDeviceID != nil
|
||||
|
||||
// Check accessibility permission
|
||||
permissionStates[2] = AXIsProcessTrusted()
|
||||
@ -315,7 +309,7 @@ struct OnboardingPermissionsView: View {
|
||||
audioDeviceManager.loadAvailableDevices()
|
||||
|
||||
if audioDeviceManager.availableDevices.isEmpty {
|
||||
audioDeviceManager.selectInputMode(.systemDefault)
|
||||
audioDeviceManager.selectInputMode(.custom)
|
||||
withAnimation {
|
||||
permissionStates[currentPermissionIndex] = true
|
||||
showAnimation = true
|
||||
@ -324,22 +318,12 @@ struct OnboardingPermissionsView: View {
|
||||
return
|
||||
}
|
||||
|
||||
// If no device is selected yet, auto-select the built-in microphone or first available device
|
||||
if audioDeviceManager.selectedDeviceID == nil {
|
||||
let builtInDevice = audioDeviceManager.availableDevices.first { device in
|
||||
device.name.lowercased().contains("built-in") ||
|
||||
device.name.lowercased().contains("internal")
|
||||
}
|
||||
|
||||
let deviceToSelect = builtInDevice ?? audioDeviceManager.availableDevices.first
|
||||
|
||||
if let device = deviceToSelect {
|
||||
audioDeviceManager.selectDevice(id: device.id)
|
||||
audioDeviceManager.selectInputMode(.custom)
|
||||
withAnimation {
|
||||
permissionStates[currentPermissionIndex] = true
|
||||
showAnimation = true
|
||||
}
|
||||
if let deviceID = audioDeviceManager.findBestAvailableDevice() {
|
||||
audioDeviceManager.selectDevice(id: deviceID)
|
||||
audioDeviceManager.selectInputMode(.custom)
|
||||
withAnimation {
|
||||
permissionStates[currentPermissionIndex] = true
|
||||
showAnimation = true
|
||||
}
|
||||
}
|
||||
moveToNext()
|
||||
|
||||
@ -258,7 +258,6 @@ struct InputModeCard: View {
|
||||
|
||||
private var icon: String {
|
||||
switch mode {
|
||||
case .systemDefault: return "macbook.and.iphone"
|
||||
case .custom: return "mic.circle.fill"
|
||||
case .prioritized: return "list.number"
|
||||
}
|
||||
@ -266,7 +265,6 @@ struct InputModeCard: View {
|
||||
|
||||
private var description: String {
|
||||
switch mode {
|
||||
case .systemDefault: return "Use system's default input device"
|
||||
case .custom: return "Select a specific input device"
|
||||
case .prioritized: return "Set up device priority order"
|
||||
}
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user