Fix logs showing `<private>` instead of actual transcription text
This commit is contained in:
parent
804fae7c01
commit
489f2eaf0c
@ -306,9 +306,9 @@ class WhisperState: NSObject, ObservableObject {
|
|||||||
|
|
||||||
let transcriptionStart = Date()
|
let transcriptionStart = Date()
|
||||||
var text = try await transcriptionService.transcribe(audioURL: url, model: model)
|
var text = try await transcriptionService.transcribe(audioURL: url, model: model)
|
||||||
logger.notice("📝 Raw transcript: \(text)")
|
logger.notice("📝 Raw transcript: \(text, privacy: .public)")
|
||||||
text = TranscriptionOutputFilter.filter(text)
|
text = TranscriptionOutputFilter.filter(text)
|
||||||
logger.notice("📝 Output filter result: \(text)")
|
logger.notice("📝 Output filter result: \(text, privacy: .public)")
|
||||||
let transcriptionDuration = Date().timeIntervalSince(transcriptionStart)
|
let transcriptionDuration = Date().timeIntervalSince(transcriptionStart)
|
||||||
|
|
||||||
let powerModeManager = PowerModeManager.shared
|
let powerModeManager = PowerModeManager.shared
|
||||||
@ -322,12 +322,12 @@ class WhisperState: NSObject, ObservableObject {
|
|||||||
|
|
||||||
if UserDefaults.standard.object(forKey: "IsTextFormattingEnabled") as? Bool ?? true {
|
if UserDefaults.standard.object(forKey: "IsTextFormattingEnabled") as? Bool ?? true {
|
||||||
text = WhisperTextFormatter.format(text)
|
text = WhisperTextFormatter.format(text)
|
||||||
logger.notice("📝 Formatted transcript: \(text)")
|
logger.notice("📝 Formatted transcript: \(text, privacy: .public)")
|
||||||
}
|
}
|
||||||
|
|
||||||
if UserDefaults.standard.bool(forKey: "IsWordReplacementEnabled") {
|
if UserDefaults.standard.bool(forKey: "IsWordReplacementEnabled") {
|
||||||
text = WordReplacementService.shared.applyReplacements(to: text)
|
text = WordReplacementService.shared.applyReplacements(to: text)
|
||||||
logger.notice("📝 WordReplacement: \(text)")
|
logger.notice("📝 WordReplacement: \(text, privacy: .public)")
|
||||||
}
|
}
|
||||||
|
|
||||||
let audioAsset = AVURLAsset(url: url)
|
let audioAsset = AVURLAsset(url: url)
|
||||||
@ -357,7 +357,7 @@ class WhisperState: NSObject, ObservableObject {
|
|||||||
|
|
||||||
do {
|
do {
|
||||||
let (enhancedText, enhancementDuration, promptName) = try await enhancementService.enhance(textForAI)
|
let (enhancedText, enhancementDuration, promptName) = try await enhancementService.enhance(textForAI)
|
||||||
logger.notice("📝 AI enhancement: \(enhancedText)")
|
logger.notice("📝 AI enhancement: \(enhancedText, privacy: .public)")
|
||||||
transcription.enhancedText = enhancedText
|
transcription.enhancedText = enhancedText
|
||||||
transcription.aiEnhancementModelName = enhancementService.getAIService()?.currentModel
|
transcription.aiEnhancementModelName = enhancementService.getAIService()?.currentModel
|
||||||
transcription.promptName = promptName
|
transcription.promptName = promptName
|
||||||
|
|||||||
Loading…
x
Reference in New Issue
Block a user