Log raw, filtered, formatted, replaced, and AI-enhanced text

This commit is contained in:
Beingpax 2025-10-25 09:57:14 +05:45
parent cacd38f339
commit 95790c4a58
2 changed files with 8 additions and 4 deletions

View File

@@ -15,7 +15,6 @@ struct TranscriptionOutputFilter {
         "hmm", "hm", "mmm", "mm", "mh", "ha", "ehh"
     ]

     static func filter(_ text: String) -> String {
-        logger.notice("🧹 Filtering hallucinations and filler words")
         var filteredText = text

         // Remove <TAG>...</TAG> blocks
@@ -48,11 +47,11 @@ struct TranscriptionOutputFilter {
         // Log results
         if filteredText != text {
-            logger.notice("✅ Removed hallucinations and filler words")
+            logger.notice("📝 Output filter result: \(filteredText)")
         } else {
-            logger.notice("✅ No hallucinations or filler words found")
+            logger.notice("📝 Output filter result (unchanged): \(filteredText)")
         }

         return filteredText
     }
}

View File

@@ -293,7 +293,9 @@ class WhisperState: NSObject, ObservableObject {
             let transcriptionStart = Date()
             var text = try await transcriptionService.transcribe(audioURL: url, model: model)
+            logger.notice("📝 Raw transcript: \(text)")
             text = TranscriptionOutputFilter.filter(text)
+            logger.notice("📝 Output filter result: \(text)")
             let transcriptionDuration = Date().timeIntervalSince(transcriptionStart)

             let powerModeManager = PowerModeManager.shared
@@ -307,10 +309,12 @@ class WhisperState: NSObject, ObservableObject {
             if UserDefaults.standard.object(forKey: "IsTextFormattingEnabled") as? Bool ?? true {
                 text = WhisperTextFormatter.format(text)
+                logger.notice("📝 Formatted transcript: \(text)")
             }

             if UserDefaults.standard.bool(forKey: "IsWordReplacementEnabled") {
                 text = WordReplacementService.shared.applyReplacements(to: text)
+                logger.notice("📝 WordReplacement: \(text)")
             }

             let audioAsset = AVURLAsset(url: url)
@@ -340,6 +344,7 @@ class WhisperState: NSObject, ObservableObject {
                 do {
                     let (enhancedText, enhancementDuration, promptName) = try await enhancementService.enhance(textForAI)
+                    logger.notice("📝 AI enhancement: \(enhancedText)")
                     transcription.enhancedText = enhancedText
                     transcription.aiEnhancementModelName = enhancementService.getAIService()?.currentModel
                     transcription.promptName = promptName