Fix screen capture crash by isolating services

This commit is contained in:
Beingpax 2025-10-18 22:07:48 +05:45
parent 7a7dbaecab
commit 14d52a9c08
5 changed files with 36 additions and 38 deletions

View File

@ -8,6 +8,7 @@ enum EnhancementPrompt {
case aiAssistant
}
@MainActor
class AIEnhancementService: ObservableObject {
private let logger = Logger(subsystem: "com.prakashjoshipax.voiceink", category: "AIEnhancementService")

View File

@ -106,7 +106,7 @@ class AudioTranscriptionService: ObservableObject {
var promptDetectionResult: PromptDetectionService.PromptDetectionResult? = nil
if let enhancementService = enhancementService, enhancementService.isConfigured {
let detectionResult = promptDetectionService.analyzeText(text, with: enhancementService)
let detectionResult = await promptDetectionService.analyzeText(text, with: enhancementService)
promptDetectionResult = detectionResult
await promptDetectionService.applyDetectionResult(detectionResult, to: enhancementService)
}

View File

@ -16,6 +16,7 @@ class PromptDetectionService {
let originalPromptId: UUID?
}
@MainActor
func analyzeText(_ text: String, with enhancementService: AIEnhancementService) -> PromptDetectionResult {
let originalEnhancementState = enhancementService.isEnhancementEnabled
let originalPromptId = enhancementService.selectedPromptId

View File

@ -4,6 +4,7 @@ import Vision
import os
import ScreenCaptureKit
@MainActor
class ScreenCaptureService: ObservableObject {
@Published var isCapturing = false
@Published var lastCapturedText: String?
@ -60,41 +61,41 @@ class ScreenCaptureService: ObservableObject {
}
}
/// Runs Vision OCR on a captured window image and returns the recognized text.
///
/// The commit replaces the old completion-handler API with an `async` method.
/// Recognition is performed off the main actor via `Task.detached` because the
/// enclosing service is `@MainActor`-isolated and `VNImageRequestHandler.perform`
/// is synchronous, blocking work that must not run on the main thread.
///
/// - Parameter image: The captured window snapshot to recognize text in.
/// - Returns: The recognized text joined line-by-line with `\n`, or `nil` when
///   the image has no CGImage backing, recognition fails, or no text was found.
private func extractText(from image: NSImage) async -> String? {
    // NSImage may be backed by non-bitmap representations; bail out early
    // rather than forcing a rasterization here.
    guard let cgImage = image.cgImage(forProposedRect: nil, context: nil, hints: nil) else {
        return nil
    }
    // Detached task: hop off the main actor for the synchronous Vision work.
    // The Result wrapper carries the failure back so logging happens on the
    // actor (Logger use stays isolated with the rest of the service).
    let result: Result<String?, Error> = await Task.detached(priority: .userInitiated) {
        let request = VNRecognizeTextRequest()
        request.recognitionLevel = .accurate
        request.usesLanguageCorrection = true
        request.automaticallyDetectsLanguage = true
        let requestHandler = VNImageRequestHandler(cgImage: cgImage, options: [:])
        do {
            try requestHandler.perform([request])
            guard let observations = request.results as? [VNRecognizedTextObservation] else {
                return .success(nil)
            }
            // Take only the top candidate per observation; join lines with \n
            // so downstream prompt assembly keeps the window's line structure.
            let text = observations
                .compactMap { $0.topCandidates(1).first?.string }
                .joined(separator: "\n")
            // Normalize "recognized nothing" to nil so callers can use
            // a single optional check instead of also testing isEmpty.
            return .success(text.isEmpty ? nil : text)
        } catch {
            return .failure(error)
        }
    }.value
    switch result {
    case .success(let text):
        return text
    case .failure(let error):
        logger.notice("📸 Text recognition failed: \(error.localizedDescription, privacy: .public)")
        return nil
    }
}
@ -124,11 +125,7 @@ class ScreenCaptureService: ObservableObject {
"""
if let capturedImage = await captureActiveWindow() {
let extractedText = await withCheckedContinuation({ continuation in
extractText(from: capturedImage) { text in
continuation.resume(returning: text)
}
})
let extractedText = await extractText(from: capturedImage)
if let extractedText = extractedText, !extractedText.isEmpty {
contextText += "Window Content:\n\(extractedText)"

View File

@ -327,7 +327,7 @@ class WhisperState: NSObject, ObservableObject {
finalPastedText = text
if let enhancementService = enhancementService, enhancementService.isConfigured {
let detectionResult = promptDetectionService.analyzeText(text, with: enhancementService)
let detectionResult = await promptDetectionService.analyzeText(text, with: enhancementService)
promptDetectionResult = detectionResult
await promptDetectionService.applyDetectionResult(detectionResult, to: enhancementService)
}
@ -429,4 +429,3 @@ class WhisperState: NSObject, ObservableObject {
await dismissMiniRecorder()
}
}