Merge pull request #296 from gdmka/feat/ai-request

Add capability to see AI request in history view
This commit is contained in:
Prakash Joshi Pax 2025-09-16 08:06:32 +05:45 committed by GitHub
commit eefd8c9b2f
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
6 changed files with 146 additions and 70 deletions

View File

@@ -14,8 +14,10 @@ final class Transcription {
var promptName: String?
var transcriptionDuration: TimeInterval?
var enhancementDuration: TimeInterval?
var aiRequestSystemMessage: String?
var aiRequestUserMessage: String?
init(text: String, duration: TimeInterval, enhancedText: String? = nil, audioFileURL: String? = nil, transcriptionModelName: String? = nil, aiEnhancementModelName: String? = nil, promptName: String? = nil, transcriptionDuration: TimeInterval? = nil, enhancementDuration: TimeInterval? = nil) {
init(text: String, duration: TimeInterval, enhancedText: String? = nil, audioFileURL: String? = nil, transcriptionModelName: String? = nil, aiEnhancementModelName: String? = nil, promptName: String? = nil, transcriptionDuration: TimeInterval? = nil, enhancementDuration: TimeInterval? = nil, aiRequestSystemMessage: String? = nil, aiRequestUserMessage: String? = nil) {
self.id = UUID()
self.text = text
self.enhancedText = enhancedText
@@ -27,5 +29,7 @@ final class Transcription {
self.promptName = promptName
self.transcriptionDuration = transcriptionDuration
self.enhancementDuration = enhancementDuration
self.aiRequestSystemMessage = aiRequestSystemMessage
self.aiRequestUserMessage = aiRequestUserMessage
}
}

View File

@@ -10,7 +10,7 @@ enum EnhancementPrompt {
class AIEnhancementService: ObservableObject {
private let logger = Logger(subsystem: "com.voiceink.enhancement", category: "AIEnhancementService")
@Published var isEnhancementEnabled: Bool {
didSet {
UserDefaults.standard.set(isEnhancementEnabled, forKey: "isAIEnhancementEnabled")
@ -21,20 +21,20 @@ class AIEnhancementService: ObservableObject {
NotificationCenter.default.post(name: .enhancementToggleChanged, object: nil)
}
}
@Published var useClipboardContext: Bool {
didSet {
UserDefaults.standard.set(useClipboardContext, forKey: "useClipboardContext")
}
}
@Published var useScreenCaptureContext: Bool {
didSet {
UserDefaults.standard.set(useScreenCaptureContext, forKey: "useScreenCaptureContext")
NotificationCenter.default.post(name: .AppSettingsDidChange, object: nil)
}
}
@Published var customPrompts: [CustomPrompt] {
didSet {
if let encoded = try? JSONEncoder().encode(customPrompts) {
@ -42,7 +42,7 @@ class AIEnhancementService: ObservableObject {
}
}
}
@Published var selectedPromptId: UUID? {
didSet {
UserDefaults.standard.set(selectedPromptId?.uuidString, forKey: "selectedPromptId")
@ -50,15 +50,18 @@ class AIEnhancementService: ObservableObject {
NotificationCenter.default.post(name: .promptSelectionChanged, object: nil)
}
}
@Published var lastSystemMessageSent: String?
@Published var lastUserMessageSent: String?
/// The prompt whose id matches `selectedPromptId`, or nil when nothing is selected.
var activePrompt: CustomPrompt? {
    return allPrompts.first(where: { $0.id == selectedPromptId })
}

/// Every prompt available for selection (currently exactly the custom prompts).
var allPrompts: [CustomPrompt] {
    customPrompts
}
private let aiService: AIService
private let screenCaptureService: ScreenCaptureService
private let dictionaryContextService: DictionaryContextService
@ -66,41 +69,41 @@ class AIEnhancementService: ObservableObject {
private let rateLimitInterval: TimeInterval = 1.0
private var lastRequestTime: Date?
private let modelContext: ModelContext
/// Creates the enhancement service, restoring persisted settings and seeding prompts.
/// - Parameters:
///   - aiService: Backend used for enhancement requests; a fresh instance by default.
///   - modelContext: SwiftData context used elsewhere to persist transcriptions.
init(aiService: AIService = AIService(), modelContext: ModelContext) {
self.aiService = aiService
self.modelContext = modelContext
self.screenCaptureService = ScreenCaptureService()
self.dictionaryContextService = DictionaryContextService.shared
// Restore persisted toggles; `bool(forKey:)` yields false when the key is unset.
self.isEnhancementEnabled = UserDefaults.standard.bool(forKey: "isAIEnhancementEnabled")
self.useClipboardContext = UserDefaults.standard.bool(forKey: "useClipboardContext")
self.useScreenCaptureContext = UserDefaults.standard.bool(forKey: "useScreenCaptureContext")
// Prompts must be assigned before selectedPromptId is validated against allPrompts below.
self.customPrompts = PromptMigrationService.migratePromptsIfNeeded()
if let savedPromptId = UserDefaults.standard.string(forKey: "selectedPromptId") {
self.selectedPromptId = UUID(uuidString: savedPromptId)
}
// If enhancement is on but the saved selection is missing or stale, fall back to the first prompt.
if isEnhancementEnabled && (selectedPromptId == nil || !allPrompts.contains(where: { $0.id == selectedPromptId })) {
self.selectedPromptId = allPrompts.first?.id
}
// Re-publish state when the provider API key changes (handled in handleAPIKeyChange).
NotificationCenter.default.addObserver(
self,
selector: #selector(handleAPIKeyChange),
name: .aiProviderKeyChanged,
object: nil
)
initializePredefinedPrompts()
}
/// Stops observing the API-key change notification registered in init.
deinit {
NotificationCenter.default.removeObserver(self)
}
@objc private func handleAPIKeyChange() {
DispatchQueue.main.async {
self.objectWillChange.send()
@ -109,15 +112,15 @@ class AIEnhancementService: ObservableObject {
}
}
}
/// Returns the underlying AI service.
/// The optional return type is kept for caller compatibility; the service is always present.
func getAIService() -> AIService? {
    aiService
}

/// True when the selected provider has a valid API key configured.
var isConfigured: Bool {
    return aiService.isAPIKeyValid
}
private func waitForRateLimit() async throws {
if let lastRequest = lastRequestTime {
let timeSinceLastRequest = Date().timeIntervalSince(lastRequest)
@ -127,14 +130,14 @@ class AIEnhancementService: ObservableObject {
}
lastRequestTime = Date()
}
private func getSystemMessage(for mode: EnhancementPrompt) -> String {
let selectedText = SelectedTextService.fetchSelectedText()
if let activePrompt = activePrompt,
activePrompt.id == PredefinedPrompts.assistantPromptId,
let selectedText = selectedText, !selectedText.isEmpty {
let selectedTextContext = "\n\nSelected Text: \(selectedText)"
let generalContextSection = "\n\n<CONTEXT_INFORMATION>\(selectedTextContext)\n</CONTEXT_INFORMATION>"
let dictionaryContextSection = if !dictionaryContextService.getDictionaryContext().isEmpty {
@ -144,7 +147,7 @@ class AIEnhancementService: ObservableObject {
}
return activePrompt.promptText + generalContextSection + dictionaryContextSection
}
let clipboardContext = if useClipboardContext,
let clipboardText = NSPasteboard.general.string(forType: .string),
!clipboardText.isEmpty {
@ -152,7 +155,7 @@ class AIEnhancementService: ObservableObject {
} else {
""
}
let screenCaptureContext = if useScreenCaptureContext,
let capturedText = screenCaptureService.lastCapturedText,
!capturedText.isEmpty {
@ -160,21 +163,21 @@ class AIEnhancementService: ObservableObject {
} else {
""
}
let dictionaryContext = dictionaryContextService.getDictionaryContext()
let generalContextSection = if !clipboardContext.isEmpty || !screenCaptureContext.isEmpty {
"\n\n<CONTEXT_INFORMATION>\(clipboardContext)\(screenCaptureContext)\n</CONTEXT_INFORMATION>"
} else {
""
}
let dictionaryContextSection = if !dictionaryContext.isEmpty {
"\n\n<DICTIONARY_CONTEXT>\(dictionaryContext)\n</DICTIONARY_CONTEXT>"
} else {
""
}
guard let activePrompt = activePrompt else {
if let defaultPrompt = allPrompts.first(where: { $0.id == PredefinedPrompts.defaultPromptId }) {
var systemMessage = String(format: AIPrompts.customPromptTemplate, defaultPrompt.promptText)
@ -183,32 +186,36 @@ class AIEnhancementService: ObservableObject {
}
return AIPrompts.assistantMode + generalContextSection + dictionaryContextSection
}
if activePrompt.id == PredefinedPrompts.assistantPromptId {
return activePrompt.promptText + generalContextSection + dictionaryContextSection
}
var systemMessage = String(format: AIPrompts.customPromptTemplate, activePrompt.promptText)
systemMessage += generalContextSection + dictionaryContextSection
return systemMessage
}
private func makeRequest(text: String, mode: EnhancementPrompt) async throws -> String {
guard isConfigured else {
throw EnhancementError.notConfigured
}
guard !text.isEmpty else {
return "" // Silently return empty string instead of throwing error
}
let formattedText = "\n<TRANSCRIPT>\n\(text)\n</TRANSCRIPT>"
let systemMessage = getSystemMessage(for: mode)
// Persist the exact payload being sent (also used for UI)
self.lastSystemMessageSent = systemMessage
self.lastUserMessageSent = formattedText
// Log the message being sent to AI enhancement
logger.notice("AI Enhancement - System Message: \(systemMessage, privacy: .public)")
logger.notice("AI Enhancement - User Message: \(formattedText, privacy: .public)")
if aiService.selectedProvider == .ollama {
do {
let result = try await aiService.enhanceWithOllama(text: formattedText, systemPrompt: systemMessage)
@ -222,9 +229,9 @@ class AIEnhancementService: ObservableObject {
}
}
}
try await waitForRateLimit()
switch aiService.selectedProvider {
case .anthropic:
let requestBody: [String: Any] = [
@ -235,7 +242,7 @@ class AIEnhancementService: ObservableObject {
["role": "user", "content": formattedText]
]
]
var request = URLRequest(url: URL(string: aiService.selectedProvider.baseURL)!)
request.httpMethod = "POST"
request.addValue("application/json", forHTTPHeaderField: "Content-Type")
@ -243,14 +250,14 @@ class AIEnhancementService: ObservableObject {
request.addValue("2023-06-01", forHTTPHeaderField: "anthropic-version")
request.timeoutInterval = baseTimeout
request.httpBody = try? JSONSerialization.data(withJSONObject: requestBody)
do {
let (data, response) = try await URLSession.shared.data(for: request)
guard let httpResponse = response as? HTTPURLResponse else {
throw EnhancementError.invalidResponse
}
if httpResponse.statusCode == 200 {
guard let jsonResponse = try? JSONSerialization.jsonObject(with: data) as? [String: Any],
let content = jsonResponse["content"] as? [[String: Any]],
@ -258,7 +265,7 @@ class AIEnhancementService: ObservableObject {
let enhancedText = firstContent["text"] as? String else {
throw EnhancementError.enhancementFailed
}
let filteredText = AIEnhancementOutputFilter.filter(enhancedText.trimmingCharacters(in: .whitespacesAndNewlines))
return filteredText
} else if httpResponse.statusCode == 429 {
@ -269,7 +276,7 @@ class AIEnhancementService: ObservableObject {
let errorString = String(data: data, encoding: .utf8) ?? "Could not decode error response."
throw EnhancementError.customError("HTTP \(httpResponse.statusCode): \(errorString)")
}
} catch let error as EnhancementError {
throw error
} catch let error as URLError {
@ -277,7 +284,7 @@ class AIEnhancementService: ObservableObject {
} catch {
throw EnhancementError.customError(error.localizedDescription)
}
default:
let url = URL(string: aiService.selectedProvider.baseURL)!
var request = URLRequest(url: url)
@ -336,7 +343,7 @@ class AIEnhancementService: ObservableObject {
}
}
}
private func makeRequestWithRetry(text: String, mode: EnhancementPrompt, maxRetries: Int = 3, initialDelay: TimeInterval = 1.0) async throws -> String {
var retries = 0
var currentDelay = initialDelay
@ -386,7 +393,7 @@ class AIEnhancementService: ObservableObject {
let startTime = Date()
let enhancementPrompt: EnhancementPrompt = .transcriptionEnhancement
let promptName = activePrompt?.title
do {
let result = try await makeRequestWithRetry(text: text, mode: enhancementPrompt)
let endTime = Date()
@ -396,17 +403,17 @@ class AIEnhancementService: ObservableObject {
throw error
}
}
/// Captures on-screen text for use as enhancement context, then notifies observers.
/// No-op when the screen-capture context toggle is disabled.
func captureScreenContext() async {
    guard useScreenCaptureContext else { return }
    // The captured text is cached inside screenCaptureService (read later via
    // lastCapturedText); here we only need to know whether capture succeeded,
    // so test for nil instead of binding a variable that would go unused.
    if await screenCaptureService.captureAndExtractText() != nil {
        await MainActor.run {
            self.objectWillChange.send()
        }
    }
}
func addPrompt(title: String, promptText: String, icon: PromptIcon = .documentFill, description: String? = nil, triggerWords: [String] = []) {
let newPrompt = CustomPrompt(title: title, promptText: promptText, icon: icon, description: description, isPredefined: false, triggerWords: triggerWords)
customPrompts.append(newPrompt)
@ -414,27 +421,27 @@ class AIEnhancementService: ObservableObject {
selectedPromptId = newPrompt.id
}
}
/// Replaces the stored prompt whose id matches `prompt`; does nothing when absent.
func updatePrompt(_ prompt: CustomPrompt) {
    guard let slot = customPrompts.firstIndex(where: { $0.id == prompt.id }) else { return }
    customPrompts[slot] = prompt
}
/// Removes `prompt` from the catalogue; if it was the active selection,
/// falls back to the first remaining prompt (or nil when none are left).
func deletePrompt(_ prompt: CustomPrompt) {
    let removingSelected = selectedPromptId == prompt.id
    customPrompts.removeAll(where: { $0.id == prompt.id })
    if removingSelected {
        selectedPromptId = allPrompts.first?.id
    }
}
/// Marks `prompt` as the active enhancement prompt (persisted via selectedPromptId's didSet).
func setActivePrompt(_ prompt: CustomPrompt) {
selectedPromptId = prompt.id
}
private func initializePredefinedPrompts() {
let predefinedTemplates = PredefinedPrompts.createDefaultPrompts()
for template in predefinedTemplates {
if let existingIndex = customPrompts.firstIndex(where: { $0.id == template.id }) {
var updatedPrompt = customPrompts[existingIndex]

View File

@@ -124,6 +124,7 @@ class AudioTranscriptionManager: ObservableObject {
enhancementService.isConfigured {
processingPhase = .enhancing
do {
// inside the enhancement success path where transcription is created
let (enhancedText, enhancementDuration, promptName) = try await enhancementService.enhance(text)
let transcription = Transcription(
text: text,
@ -134,7 +135,9 @@ class AudioTranscriptionManager: ObservableObject {
aiEnhancementModelName: enhancementService.getAIService()?.currentModel,
promptName: promptName,
transcriptionDuration: transcriptionDuration,
enhancementDuration: enhancementDuration
enhancementDuration: enhancementDuration,
aiRequestSystemMessage: enhancementService.lastSystemMessageSent,
aiRequestUserMessage: enhancementService.lastUserMessageSent
)
modelContext.insert(transcription)
try modelContext.save()
@ -211,4 +214,4 @@ enum TranscriptionError: Error, LocalizedError {
return "Transcription was cancelled"
}
}
}
}

View File

@@ -95,8 +95,8 @@ class AudioTranscriptionService: ObservableObject {
enhancementService.isEnhancementEnabled,
enhancementService.isConfigured {
do {
// inside the enhancement success path where newTranscription is created
let (enhancedText, enhancementDuration, promptName) = try await enhancementService.enhance(text)
let newTranscription = Transcription(
text: text,
duration: duration,
@ -106,7 +106,9 @@ class AudioTranscriptionService: ObservableObject {
aiEnhancementModelName: enhancementService.getAIService()?.currentModel,
promptName: promptName,
transcriptionDuration: transcriptionDuration,
enhancementDuration: enhancementDuration
enhancementDuration: enhancementDuration,
aiRequestSystemMessage: enhancementService.lastSystemMessageSent,
aiRequestUserMessage: enhancementService.lastUserMessageSent
)
modelContext.insert(newTranscription)
do {

View File

@@ -7,6 +7,7 @@ struct TranscriptionCard: View {
let isSelected: Bool
let onDelete: () -> Void
let onToggleSelection: () -> Void
@State private var isAIRequestExpanded: Bool = false
var body: some View {
HStack(spacing: 12) {
@ -77,6 +78,63 @@ struct TranscriptionCard: View {
}
}
// NEW: AI Request payload (System + User messages) - folded by default
if isExpanded, (transcription.aiRequestSystemMessage != nil || transcription.aiRequestUserMessage != nil) {
Divider()
.padding(.vertical, 8)
VStack(alignment: .leading, spacing: 8) {
HStack(spacing: 6) {
Image(systemName: "paperplane.fill")
.foregroundColor(.purple)
Text("AI Request")
.fontWeight(.semibold)
.foregroundColor(.purple)
Spacer()
}
.contentShape(Rectangle())
.onTapGesture {
withAnimation(.easeInOut) {
isAIRequestExpanded.toggle()
}
}
if isAIRequestExpanded {
VStack(alignment: .leading, spacing: 12) {
if let systemMsg = transcription.aiRequestSystemMessage, !systemMsg.isEmpty {
VStack(alignment: .leading, spacing: 6) {
HStack {
Text("System Prompt")
.font(.system(size: 13, weight: .semibold))
.foregroundColor(.secondary)
Spacer()
AnimatedCopyButton(textToCopy: systemMsg)
}
Text(systemMsg)
.font(.system(size: 13, weight: .regular, design: .monospaced))
.lineSpacing(2)
}
}
if let userMsg = transcription.aiRequestUserMessage, !userMsg.isEmpty {
VStack(alignment: .leading, spacing: 6) {
HStack {
Text("User Message")
.font(.system(size: 13, weight: .semibold))
.foregroundColor(.secondary)
Spacer()
AnimatedCopyButton(textToCopy: userMsg)
}
Text(userMsg)
.font(.system(size: 13, weight: .regular, design: .monospaced))
.lineSpacing(2)
}
}
}
}
}
}
// Audio player (if available)
if isExpanded, let urlString = transcription.audioFileURL,
let url = URL(string: urlString),

View File

@@ -298,17 +298,19 @@ class WhisperState: NSObject, ObservableObject {
await MainActor.run { self.recordingState = .enhancing }
let textForAI = promptDetectionResult?.processedText ?? text
let (enhancedText, enhancementDuration, promptName) = try await enhancementService.enhance(textForAI)
let newTranscription = Transcription(
text: originalText,
duration: actualDuration,
enhancedText: enhancedText,
audioFileURL: url.absoluteString,
transcriptionModelName: model.displayName,
aiEnhancementModelName: enhancementService.getAIService()?.currentModel,
promptName: promptName,
transcriptionDuration: transcriptionDuration,
enhancementDuration: enhancementDuration
)
let newTranscription = Transcription(
text: originalText,
duration: actualDuration,
enhancedText: enhancedText,
audioFileURL: url.absoluteString,
transcriptionModelName: model.displayName,
aiEnhancementModelName: enhancementService.getAIService()?.currentModel,
promptName: promptName,
transcriptionDuration: transcriptionDuration,
enhancementDuration: enhancementDuration,
aiRequestSystemMessage: enhancementService.lastSystemMessageSent,
aiRequestUserMessage: enhancementService.lastUserMessageSent
)
modelContext.insert(newTranscription)
try? modelContext.save()
NotificationCenter.default.post(name: .transcriptionCreated, object: newTranscription)