From 2a8ed6d08c27609071db1ec7aa05ba80ab9a39a3 Mon Sep 17 00:00:00 2001 From: Beingpax Date: Wed, 31 Dec 2025 20:51:03 +0545 Subject: [PATCH] Refactor: Extract formatTiming to extension, deduplicate file deletion logic, add AudioPlayerManager cleanup, and implement waveform caching --- VoiceInk/Views/AudioPlayerView.swift | 57 +++++++++++++++---- .../History/TranscriptionHistoryView.swift | 54 +++++++++--------- .../Views/History/TranscriptionListItem.swift | 14 +---- .../History/TranscriptionMetadataView.swift | 18 +----- 4 files changed, 76 insertions(+), 67 deletions(-) diff --git a/VoiceInk/Views/AudioPlayerView.swift b/VoiceInk/Views/AudioPlayerView.swift index e403d19..76209fe 100644 --- a/VoiceInk/Views/AudioPlayerView.swift +++ b/VoiceInk/Views/AudioPlayerView.swift @@ -1,37 +1,63 @@ import SwiftUI import AVFoundation +extension TimeInterval { + func formatTiming() -> String { + if self < 1 { + return String(format: "%.0fms", self * 1000) + } + if self < 60 { + return String(format: "%.1fs", self) + } + let minutes = Int(self) / 60 + let seconds = self.truncatingRemainder(dividingBy: 60) + return String(format: "%dm %.0fs", minutes, seconds) + } +} + class WaveformGenerator { + private static let cache = NSCache<NSString, NSArray>() + static func generateWaveformSamples(from url: URL, sampleCount: Int = 200) async -> [Float] { + let cacheKey = url.absoluteString as NSString + + if let cachedSamples = cache.object(forKey: cacheKey) as? [Float] { + return cachedSamples + } guard let audioFile = try? 
AVAudioFile(forReading: url) else { return [] } let format = audioFile.processingFormat let frameCount = UInt32(audioFile.length) let stride = max(1, Int(frameCount) / sampleCount) let bufferSize = min(UInt32(4096), frameCount) - + guard let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: bufferSize) else { return [] } - + do { var maxValues = [Float](repeating: 0.0, count: sampleCount) var sampleIndex = 0 var framePosition: AVAudioFramePosition = 0 - + while sampleIndex < sampleCount && framePosition < AVAudioFramePosition(frameCount) { audioFile.framePosition = framePosition try audioFile.read(into: buffer) - + if let channelData = buffer.floatChannelData?[0], buffer.frameLength > 0 { maxValues[sampleIndex] = abs(channelData[0]) sampleIndex += 1 } - + framePosition += AVAudioFramePosition(stride) } - + + let normalizedSamples: [Float] if let maxSample = maxValues.max(), maxSample > 0 { - return maxValues.map { $0 / maxSample } + normalizedSamples = maxValues.map { $0 / maxSample } + } else { + normalizedSamples = maxValues } - return maxValues + + cache.setObject(normalizedSamples as NSArray, forKey: cacheKey) + return normalizedSamples } catch { print("Error reading audio file: \(error)") return [] @@ -94,14 +120,20 @@ class AudioPlayerManager: ObservableObject { } } } - + private func stopTimer() { timer?.invalidate() timer = nil } - - deinit { + + func cleanup() { stopTimer() + audioPlayer?.stop() + audioPlayer = nil + } + + deinit { + cleanup() } } @@ -345,6 +377,9 @@ struct AudioPlayerView: View { .onAppear { playerManager.loadAudio(from: url) } + .onDisappear { + playerManager.cleanup() + } .overlay( VStack { if showRetranscribeSuccess { diff --git a/VoiceInk/Views/History/TranscriptionHistoryView.swift b/VoiceInk/Views/History/TranscriptionHistoryView.swift index bd27746..6c1294b 100644 --- a/VoiceInk/Views/History/TranscriptionHistoryView.swift +++ b/VoiceInk/Views/History/TranscriptionHistoryView.swift @@ -357,53 +357,51 @@ struct 
TranscriptionHistoryView: View { hasMoreContent = true isLoading = false } - - private func deleteTranscription(_ transcription: Transcription) { + + private func performDeletion(for transcription: Transcription) { if let urlString = transcription.audioFileURL, - let url = URL(string: urlString) { - try? FileManager.default.removeItem(at: url) + let url = URL(string: urlString), + FileManager.default.fileExists(atPath: url.path) { + do { + try FileManager.default.removeItem(at: url) + } catch { + print("Error deleting audio file: \(error.localizedDescription)") + } } - modelContext.delete(transcription) if selectedTranscription == transcription { selectedTranscription = nil } selectedTranscriptions.remove(transcription) + modelContext.delete(transcription) + } + private func saveAndReload() async { + do { + try modelContext.save() + await loadInitialContent() + } catch { + print("Error saving deletion: \(error.localizedDescription)") + await loadInitialContent() + } + } + + private func deleteTranscription(_ transcription: Transcription) { + performDeletion(for: transcription) Task { - do { - try modelContext.save() - await loadInitialContent() - } catch { - print("Error saving deletion: \(error.localizedDescription)") - await loadInitialContent() - } + await saveAndReload() } } private func deleteSelectedTranscriptions() { for transcription in selectedTranscriptions { - if let urlString = transcription.audioFileURL, - let url = URL(string: urlString) { - try? 
FileManager.default.removeItem(at: url) - } - modelContext.delete(transcription) - if selectedTranscription == transcription { - selectedTranscription = nil - } + performDeletion(for: transcription) } - selectedTranscriptions.removeAll() Task { - do { - try modelContext.save() - await loadInitialContent() - } catch { - print("Error saving deletion: \(error.localizedDescription)") - await loadInitialContent() - } + await saveAndReload() } } diff --git a/VoiceInk/Views/History/TranscriptionListItem.swift b/VoiceInk/Views/History/TranscriptionListItem.swift index 0e68ffd..b9e2411 100644 --- a/VoiceInk/Views/History/TranscriptionListItem.swift +++ b/VoiceInk/Views/History/TranscriptionListItem.swift @@ -23,7 +23,7 @@ struct TranscriptionListItem: View { .foregroundColor(.secondary) Spacer() if transcription.duration > 0 { - Text(formatTiming(transcription.duration)) + Text(transcription.duration.formatTiming()) .font(.system(size: 10, weight: .medium)) .padding(.horizontal, 6) .padding(.vertical, 3) @@ -54,18 +54,6 @@ struct TranscriptionListItem: View { .contentShape(Rectangle()) .onTapGesture { onSelect() } } - - private func formatTiming(_ duration: TimeInterval) -> String { - if duration < 1 { - return String(format: "%.0fms", duration * 1000) - } - if duration < 60 { - return String(format: "%.1fs", duration) - } - let minutes = Int(duration) / 60 - let seconds = duration.truncatingRemainder(dividingBy: 60) - return String(format: "%dm %.0fs", minutes, seconds) - } } struct CircularCheckboxStyle: ToggleStyle { diff --git a/VoiceInk/Views/History/TranscriptionMetadataView.swift b/VoiceInk/Views/History/TranscriptionMetadataView.swift index 6c9b987..3713bd8 100644 --- a/VoiceInk/Views/History/TranscriptionMetadataView.swift +++ b/VoiceInk/Views/History/TranscriptionMetadataView.swift @@ -21,7 +21,7 @@ struct TranscriptionMetadataView: View { metadataRow( icon: "hourglass", label: "Duration", - value: formatTiming(transcription.duration) + value: 
transcription.duration.formatTiming() ) if let modelName = transcription.transcriptionModelName { @@ -37,7 +37,7 @@ struct TranscriptionMetadataView: View { metadataRow( icon: "clock.fill", label: "Transcription Time", - value: formatTiming(duration) + value: duration.formatTiming() ) } } @@ -55,7 +55,7 @@ struct TranscriptionMetadataView: View { metadataRow( icon: "clock.fill", label: "Enhancement Time", - value: formatTiming(duration) + value: duration.formatTiming() ) } } @@ -155,18 +155,6 @@ struct TranscriptionMetadataView: View { } } - private func formatTiming(_ duration: TimeInterval) -> String { - if duration < 1 { - return String(format: "%.0fms", duration * 1000) - } - if duration < 60 { - return String(format: "%.1fs", duration) - } - let minutes = Int(duration) / 60 - let seconds = duration.truncatingRemainder(dividingBy: 60) - return String(format: "%dm %.0fs", minutes, seconds) - } - private func powerModeDisplay(name: String?, emoji: String?) -> String? { guard name != nil || emoji != nil else { return nil }