import SwiftUI
import AVFoundation

// MARK: - Waveform sample extraction

/// Downsamples an audio file into a fixed number of normalized amplitude
/// values (0...1) suitable for drawing a waveform preview.
final class WaveformGenerator {

    /// Reads the audio at `url` and returns `sampleCount` bucket-averaged,
    /// peak-normalized amplitude values from the first channel.
    /// Returns an empty array if the file cannot be opened or read.
    ///
    /// - Parameters:
    ///   - url: Location of the audio file to analyze.
    ///   - sampleCount: Number of waveform bars to produce (default 200).
    static func generateWaveformSamples(from url: URL, sampleCount: Int = 200) -> [Float] {
        guard sampleCount > 0,
              let audioFile = try? AVAudioFile(forReading: url) else { return [] }

        let format = audioFile.processingFormat
        let frameCount = AVAudioFrameCount(audioFile.length)

        // BUGFIX: a zero-length file would make AVAudioPCMBuffer fail, and a
        // file shorter than `sampleCount` frames made `samplesPerFrame` zero,
        // causing a divide-by-zero in the original averaging step.
        guard frameCount > 0 else { return [] }
        let framesPerBucket = max(Int(frameCount) / sampleCount, 1)

        // NOTE(review): this loads the whole file into memory at once; fine
        // for short recordings, but consider chunked reads for long audio.
        guard let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: frameCount) else {
            return []
        }

        do {
            try audioFile.read(into: buffer)

            // First channel only — a mono preview is sufficient for the UI.
            guard let channelData = buffer.floatChannelData?[0] else { return [] }

            let totalFrames = Int(buffer.frameLength)
            var samples = [Float](repeating: 0.0, count: sampleCount)

            // Average the absolute amplitude within each bucket.
            // (Reconstructed: the original loop body was lost in extraction.)
            for bucket in 0..<sampleCount {
                let start = bucket * framesPerBucket
                guard start < totalFrames else { break }
                let end = min(start + framesPerBucket, totalFrames)
                var sum: Float = 0
                for frame in start..<end {
                    sum += abs(channelData[frame])
                }
                samples[bucket] = sum / Float(end - start)
            }

            // Peak-normalize so the tallest bar reaches full height.
            if let maxSample = samples.max(), maxSample > 0 {
                samples = samples.map { $0 / maxSample }
            }
            return samples
        } catch {
            print("Error reading audio file: \(error)")
            return []
        }
    }
}

// MARK: - Playback state

/// Owns the `AVAudioPlayer` and publishes playback state for SwiftUI views.
final class AudioPlayerManager: ObservableObject {
    private var audioPlayer: AVAudioPlayer?

    @Published var isPlaying = false
    @Published var currentTime: TimeInterval = 0
    @Published var duration: TimeInterval = 0
    @Published var waveformSamples: [Float] = []

    /// Drives `currentTime` updates while playing; nil when stopped.
    private var timer: Timer?

    /// Prepares the player and precomputes waveform data for `url`.
    func loadAudio(from url: URL) {
        do {
            audioPlayer = try AVAudioPlayer(contentsOf: url)
            audioPlayer?.prepareToPlay()
            duration = audioPlayer?.duration ?? 0

            // Generate waveform data for the scrubber.
            waveformSamples = WaveformGenerator.generateWaveformSamples(from: url)
        } catch {
            print("Error loading audio: \(error.localizedDescription)")
        }
    }

    func play() {
        audioPlayer?.play()
        isPlaying = true
        startTimer()
    }

    func pause() {
        audioPlayer?.pause()
        isPlaying = false
        stopTimer()
    }

    /// Moves playback to `time`, clamped to the valid range.
    func seek(to time: TimeInterval) {
        // BUGFIX: the original forwarded unclamped values; out-of-range
        // seeks (e.g. from a drag past the view edge) are now bounded.
        let clamped = min(max(time, 0), duration)
        audioPlayer?.currentTime = clamped
        currentTime = clamped
    }

    private func startTimer() {
        // BUGFIX: invalidate any existing timer first — calling play()
        // repeatedly used to stack multiple live timers.
        stopTimer()
        timer = Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { [weak self] _ in
            guard let self = self else { return }
            self.currentTime = self.audioPlayer?.currentTime ?? 0
            // Auto-stop and rewind once playback reaches the end.
            if self.currentTime >= self.duration {
                self.pause()
                self.seek(to: 0)
            }
        }
    }

    private func stopTimer() {
        timer?.invalidate()
        timer = nil
    }

    deinit {
        stopTimer()
    }
}

// MARK: - Waveform view

/// Interactive waveform scrubber: renders one `WaveformBar` per sample and
/// converts taps/drags into seek requests via `onSeek`.
///
/// NOTE(review): the original rendering/gesture body was lost to extraction
/// garbling ("ForEach(0.. String {"); this is a faithful reconstruction from
/// the surviving property list and the `WaveformBar` interface — confirm
/// against the original before shipping.
struct WaveformView: View {
    let samples: [Float]
    let currentTime: TimeInterval
    let duration: TimeInterval
    var onSeek: (Double) -> Void

    @State private var isHovering = false
    @State private var hoverLocation: CGFloat = 0

    /// Fraction of the track already played, in 0...1.
    private var playbackProgress: CGFloat {
        duration > 0 ? CGFloat(currentTime / duration) : 0
    }

    var body: some View {
        GeometryReader { geometry in
            ZStack(alignment: .leading) {
                // Removed the glass-morphic background and its overlays

                // Waveform container
                HStack(spacing: 1) {
                    ForEach(0..<samples.count, id: \.self) { index in
                        WaveformBar(
                            sample: samples[index],
                            isPlayed: CGFloat(index) / CGFloat(max(samples.count, 1)) <= playbackProgress,
                            totalBars: samples.count,
                            geometryWidth: geometry.size.width,
                            isHovering: isHovering,
                            hoverProgress: hoverLocation / max(geometry.size.width, 1),
                            index: index
                        )
                    }
                }
                .contentShape(Rectangle())
                // Tap or drag anywhere on the waveform to seek.
                .gesture(
                    DragGesture(minimumDistance: 0)
                        .onChanged { value in
                            let fraction = min(max(value.location.x / max(geometry.size.width, 1), 0), 1)
                            onSeek(Double(fraction) * duration)
                        }
                )
                // Track the pointer so nearby bars can react (macOS 13+/iOS 16+).
                .onContinuousHover { phase in
                    switch phase {
                    case .active(let point):
                        isHovering = true
                        hoverLocation = point.x
                    case .ended:
                        isHovering = false
                    }
                }
            }
        }
        .frame(height: 40)
    }

    /// Formats seconds as "m:ss" for display.
    private func formatTime(_ time: TimeInterval) -> String {
        let minutes = Int(time) / 60
        let seconds = Int(time) % 60
        return String(format: "%d:%02d", minutes, seconds)
    }
}

// MARK: - Single waveform bar

/// One amplitude bar of the waveform. Played bars render at full accent
/// color; bars near the pointer scale up slightly while hovering.
struct WaveformBar: View {
    let sample: Float
    let isPlayed: Bool
    let totalBars: Int
    let geometryWidth: CGFloat
    let isHovering: Bool
    /// Pointer x-position as a fraction (0...1) of the waveform width.
    let hoverProgress: CGFloat
    /// Position of this bar in the row. Defaulted so existing memberwise
    /// call sites that omit it still compile.
    var index: Int = 0

    /// Bar height as a fraction of the maximum (sample is pre-normalized).
    private var barProgress: CGFloat {
        CGFloat(sample)
    }

    /// Whether the pointer is within 20pt of this bar's center.
    private var isNearHover: Bool {
        // BUGFIX: the original computed the bar *width*
        // (geometryWidth / totalBars) and compared that to the hover
        // position, so the proximity effect never tracked the pointer.
        guard totalBars > 0 else { return false }
        let barPosition = (CGFloat(index) + 0.5) * geometryWidth / CGFloat(totalBars)
        let hoverPosition = hoverProgress * geometryWidth
        return abs(barPosition - hoverPosition) < 20
    }

    var body: some View {
        Capsule()
            .fill(
                LinearGradient(
                    gradient: Gradient(colors: [
                        isPlayed ? Color.accentColor : Color.accentColor.opacity(0.3),
                        isPlayed ? Color.accentColor.opacity(0.8) : Color.accentColor.opacity(0.2)
                    ]),
                    startPoint: .bottom,
                    endPoint: .top
                )
            )
            .frame(
                width: max((geometryWidth / CGFloat(totalBars)) - 1, 1),
                height: max(barProgress * 40, 3)  // minimum 3pt so silence is still visible
            )
            .scaleEffect(y: isHovering && isNearHover ? 1.2 : 1.0)
            .animation(.interpolatingSpring(stiffness: 300, damping: 15), value: isHovering && isNearHover)
    }
}

// MARK: - Player view

/// Complete audio player card: title row, waveform scrubber, and transport
/// controls, backed by an `AudioPlayerManager`.
struct AudioPlayerView: View {
    let url: URL

    @StateObject private var playerManager = AudioPlayerManager()
    @State private var isHovering = false
    @State private var showingTooltip = false

    var body: some View {
        VStack(spacing: 16) {
            // Title and duration
            HStack {
                HStack(spacing: 6) {
                    Image(systemName: "waveform")
                        .foregroundStyle(Color.accentColor)
                    Text("Recording")
                        .font(.system(size: 14, weight: .medium))
                }
                .foregroundColor(.secondary)

                Spacer()

                Text(formatTime(playerManager.duration))
                    .font(.system(size: 14, weight: .medium))
                    .monospacedDigit()
                    .foregroundColor(.secondary)
            }

            // Waveform and controls container
            VStack(spacing: 16) {
                // Waveform
                WaveformView(
                    samples: playerManager.waveformSamples,
                    currentTime: playerManager.currentTime,
                    duration: playerManager.duration,
                    onSeek: { time in
                        playerManager.seek(to: time)
                    }
                )

                // Controls
                HStack(spacing: 20) {
                    // Play/Pause button
                    Button(action: {
                        if playerManager.isPlaying {
                            playerManager.pause()
                        } else {
                            playerManager.play()
                        }
                    }) {
                        Circle()
                            .fill(Color.accentColor.opacity(0.1))
                            .frame(width: 44, height: 44)
                            .overlay(
                                Image(systemName: playerManager.isPlaying ? "pause.fill" : "play.fill")
                                    .font(.system(size: 18, weight: .semibold))
                                    .foregroundStyle(Color.accentColor)
                                    .contentTransition(.symbolEffect(.replace.downUp))
                            )
                    }
                    .buttonStyle(.plain)
                    .scaleEffect(isHovering ? 1.05 : 1.0)
                    .onHover { hovering in
                        withAnimation(.spring(response: 0.3, dampingFraction: 0.7)) {
                            isHovering = hovering
                        }
                    }

                    // Time
                    Text(formatTime(playerManager.currentTime))
                        .font(.system(size: 14, weight: .medium))
                        .monospacedDigit()
                        .foregroundColor(.secondary)
                }
            }
        }
        .padding(.vertical, 12)
        .padding(.horizontal, 16)
        .onAppear {
            playerManager.loadAudio(from: url)
        }
    }

    /// Formats seconds as "m:ss" for display.
    private func formatTime(_ time: TimeInterval) -> String {
        let minutes = Int(time) / 60
        let seconds = Int(time) % 60
        return String(format: "%d:%02d", minutes, seconds)
    }
}