Merge pull request #490 from Beingpax/cleanup-verbose-logs

Remove verbose logging from cleanup and capture services
This commit is contained in:
Prakash Joshi Pax 2026-01-12 09:22:41 +05:45 committed by GitHub
commit f4054b9b68
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
6 changed files with 36 additions and 124 deletions

View File

@@ -209,10 +209,6 @@ class AIEnhancementService: ObservableObject {
self.lastUserMessageSent = formattedText
}
// Log the message being sent to AI enhancement
logger.notice("AI Enhancement - System Message: \(systemMessage, privacy: .public)")
logger.notice("AI Enhancement - User Message: \(formattedText, privacy: .public)")
if aiService.selectedProvider == .ollama {
do {
let result = try await aiService.enhanceWithOllama(text: formattedText, systemPrompt: systemMessage)

View File

@@ -19,21 +19,18 @@ class CustomModelManager: ObservableObject {
func addCustomModel(_ model: CustomCloudModel) {
customModels.append(model)
saveCustomModels()
logger.info("Added custom model: \(model.displayName)")
}
func removeCustomModel(withId id: UUID) {
customModels.removeAll { $0.id == id }
saveCustomModels()
APIKeyManager.shared.deleteCustomModelAPIKey(forModelId: id)
logger.info("Removed custom model with ID: \(id)")
}
func updateCustomModel(_ updatedModel: CustomCloudModel) {
if let index = customModels.firstIndex(where: { $0.id == updatedModel.id }) {
customModels[index] = updatedModel
saveCustomModels()
logger.info("Updated custom model: \(updatedModel.displayName)")
}
}
@@ -41,7 +38,6 @@ class CustomModelManager: ObservableObject {
private func loadCustomModels() {
guard let data = userDefaults.data(forKey: customModelsKey) else {
logger.info("No custom models found in UserDefaults")
return
}

View File

@@ -116,10 +116,6 @@ class OllamaService: ObservableObject {
throw LocalAIError.invalidRequest
}
print("\nOllama Enhancement Debug:")
print("Original Text: \(text)")
print("System Prompt: \(systemPrompt)")
let body: [String: Any] = [
"model": selectedModel,
"prompt": text,
@@ -139,7 +135,6 @@ class OllamaService: ObservableObject {
switch httpResponse.statusCode {
case 200:
let response = try JSONDecoder().decode(OllamaResponse.self, from: data)
print("Enhanced Text: \(response.response)\n")
return response.response
case 404:
throw LocalAIError.modelNotFound

View File

@@ -1,7 +1,6 @@
import Foundation
import AppKit
import Vision
import os
import ScreenCaptureKit
@MainActor
@@ -9,11 +8,6 @@ class ScreenCaptureService: ObservableObject {
@Published var isCapturing = false
@Published var lastCapturedText: String?
private let logger = Logger(
subsystem: "com.prakashjoshipax.voiceink",
category: "aienhancement"
)
private struct WindowCandidate {
let title: String
let ownerName: String
@@ -88,7 +82,6 @@ class ScreenCaptureService: ObservableObject {
return NSImage(cgImage: cgImage, size: NSSize(width: cgImage.width, height: cgImage.height))
} catch {
logger.notice("📸 Screen capture failed: \(error.localizedDescription, privacy: .public)")
return nil
}
}
@@ -125,8 +118,7 @@ class ScreenCaptureService: ObservableObject {
switch result {
case .success(let text):
return text
case .failure(let error):
logger.notice("📸 Text recognition failed: \(error.localizedDescription, privacy: .public)")
case .failure:
return nil
}
}
@@ -144,12 +136,9 @@ class ScreenCaptureService: ObservableObject {
}
guard let windowInfo = getActiveWindowInfo() else {
logger.notice("📸 No active window found")
return nil
}
logger.notice("📸 Capturing: \(windowInfo.title, privacy: .public) (\(windowInfo.ownerName, privacy: .public))")
var contextText = """
Active Window: \(windowInfo.title)
Application: \(windowInfo.ownerName)
@@ -161,11 +150,8 @@ class ScreenCaptureService: ObservableObject {
if let extractedText = extractedText, !extractedText.isEmpty {
contextText += "Window Content:\n\(extractedText)"
let preview = String(extractedText.prefix(100))
logger.notice("📸 Text extracted: \(preview, privacy: .public)\(extractedText.count > 100 ? "..." : "")")
} else {
contextText += "Window Content:\nNo text detected via OCR"
logger.notice("📸 No text extracted from window")
}
await MainActor.run {
@@ -175,7 +161,6 @@ class ScreenCaptureService: ObservableObject {
return contextText
}
logger.notice("📸 Window capture failed")
return nil
}
}

View File

@@ -1,26 +1,20 @@
import Foundation
import SwiftData
import OSLog
/// A utility class that manages automatic cleanup of audio files while preserving transcript data
class AudioCleanupManager {
static let shared = AudioCleanupManager()
private let logger = Logger(subsystem: "com.prakashjoshipax.voiceink", category: "AudioCleanupManager")
private var cleanupTimer: Timer?
// Default cleanup settings
private let defaultRetentionDays = 7
private let cleanupCheckInterval: TimeInterval = 86400 // Check once per day (in seconds)
private init() {
logger.info("AudioCleanupManager initialized")
}
private init() {}
/// Start the automatic cleanup process
func startAutomaticCleanup(modelContext: ModelContext) {
logger.info("Starting automatic audio cleanup")
// Cancel any existing timer
cleanupTimer?.invalidate()
@@ -35,21 +29,16 @@ class AudioCleanupManager {
await self?.performCleanup(modelContext: modelContext)
}
}
logger.info("Automatic cleanup scheduled")
}
/// Stop the automatic cleanup process
func stopAutomaticCleanup() {
logger.info("Stopping automatic audio cleanup")
cleanupTimer?.invalidate()
cleanupTimer = nil
}
/// Get information about the files that would be cleaned up
func getCleanupInfo(modelContext: ModelContext) async -> (fileCount: Int, totalSize: Int64, transcriptions: [Transcription]) {
logger.info("Analyzing potential audio cleanup")
// Get retention period from UserDefaults
let retentionDays = UserDefaults.standard.integer(forKey: "AudioRetentionPeriod")
let effectiveRetentionDays = retentionDays > 0 ? retentionDays : defaultRetentionDays
@@ -57,7 +46,6 @@ class AudioCleanupManager {
// Calculate the cutoff date
let calendar = Calendar.current
guard let cutoffDate = calendar.date(byAdding: .day, value: -effectiveRetentionDays, to: Date()) else {
logger.error("Failed to calculate cutoff date")
return (0, 0, [])
}
@@ -83,55 +71,38 @@ class AudioCleanupManager {
if let urlString = transcription.audioFileURL,
let url = URL(string: urlString),
FileManager.default.fileExists(atPath: url.path) {
do {
// Get file attributes to determine size
let attributes = try FileManager.default.attributesOfItem(atPath: url.path)
if let fileSize = attributes[.size] as? Int64 {
totalSize += fileSize
fileCount += 1
eligibleTranscriptions.append(transcription)
}
} catch {
self.logger.error("Failed to get attributes for \(url.lastPathComponent): \(error.localizedDescription)")
if let attributes = try? FileManager.default.attributesOfItem(atPath: url.path),
let fileSize = attributes[.size] as? Int64 {
totalSize += fileSize
fileCount += 1
eligibleTranscriptions.append(transcription)
}
}
}
self.logger.info("Found \(fileCount) files eligible for cleanup, totaling \(self.formatFileSize(totalSize))")
return (fileCount, totalSize, eligibleTranscriptions)
}
} catch {
logger.error("Error analyzing files for cleanup: \(error.localizedDescription)")
return (0, 0, [])
}
}
/// Perform the cleanup operation
private func performCleanup(modelContext: ModelContext) async {
logger.info("Performing audio cleanup")
// Get retention period from UserDefaults
let retentionDays = UserDefaults.standard.integer(forKey: "AudioRetentionPeriod")
let effectiveRetentionDays = retentionDays > 0 ? retentionDays : defaultRetentionDays
// Check if automatic cleanup is enabled
let isCleanupEnabled = UserDefaults.standard.bool(forKey: "IsAudioCleanupEnabled")
guard isCleanupEnabled else {
logger.info("Audio cleanup is disabled, skipping")
return
}
logger.info("Audio retention period: \(effectiveRetentionDays) days")
guard isCleanupEnabled else { return }
// Calculate the cutoff date
let calendar = Calendar.current
guard let cutoffDate = calendar.date(byAdding: .day, value: -effectiveRetentionDays, to: Date()) else {
logger.error("Failed to calculate cutoff date")
return
}
logger.info("Cutoff date for audio cleanup: \(cutoffDate)")
do {
// Execute SwiftData operations on the main thread
try await MainActor.run {
@@ -144,38 +115,28 @@ class AudioCleanupManager {
)
let transcriptions = try modelContext.fetch(descriptor)
self.logger.info("Found \(transcriptions.count) transcriptions with audio files to clean up")
var deletedCount = 0
var errorCount = 0
for transcription in transcriptions {
if let urlString = transcription.audioFileURL,
let url = URL(string: urlString),
FileManager.default.fileExists(atPath: url.path) {
do {
// Delete the audio file
try FileManager.default.removeItem(at: url)
// Update the transcription to remove the audio file reference
transcription.audioFileURL = nil
deletedCount += 1
self.logger.debug("Deleted audio file: \(url.lastPathComponent)")
} catch {
errorCount += 1
self.logger.error("Failed to delete audio file \(url.lastPathComponent): \(error.localizedDescription)")
// Skip this file - don't update audioFileURL if deletion failed
}
}
}
if deletedCount > 0 || errorCount > 0 {
if deletedCount > 0 {
try modelContext.save()
self.logger.info("Cleanup complete. Deleted \(deletedCount) files. Failed: \(errorCount)")
}
}
} catch {
logger.error("Error during audio cleanup: \(error.localizedDescription)")
// Silently fail - cleanup is non-critical
}
}
@@ -186,8 +147,6 @@ class AudioCleanupManager {
/// Run cleanup on the specified transcriptions
func runCleanupForTranscriptions(modelContext: ModelContext, transcriptions: [Transcription]) async -> (deletedCount: Int, errorCount: Int) {
logger.info("Running cleanup for \(transcriptions.count) specific transcriptions")
do {
// Execute SwiftData operations on the main thread
return try await MainActor.run {
@@ -199,34 +158,22 @@ class AudioCleanupManager {
let url = URL(string: urlString),
FileManager.default.fileExists(atPath: url.path) {
do {
// Delete the audio file
try FileManager.default.removeItem(at: url)
// Update the transcription to remove the audio file reference
transcription.audioFileURL = nil
deletedCount += 1
self.logger.debug("Deleted audio file: \(url.lastPathComponent)")
} catch {
errorCount += 1
self.logger.error("Failed to delete audio file \(url.lastPathComponent): \(error.localizedDescription)")
}
}
}
if deletedCount > 0 || errorCount > 0 {
do {
try modelContext.save()
self.logger.info("Cleanup complete. Deleted \(deletedCount) files. Failed: \(errorCount)")
} catch {
self.logger.error("Error saving model context after cleanup: \(error.localizedDescription)")
}
try? modelContext.save()
}
return (deletedCount, errorCount)
}
} catch {
logger.error("Error during targeted cleanup: \(error.localizedDescription)")
return (0, 0)
}
}

View File

@@ -49,13 +49,6 @@ struct VoiceInkApp: App {
// Attempt 1: Try persistent storage
if let persistentContainer = Self.createPersistentContainer(schema: schema, logger: logger) {
container = persistentContainer
#if DEBUG
// Print SwiftData storage location in debug builds only
if let url = persistentContainer.mainContext.container.configurations.first?.url {
print("💾 SwiftData storage location: \(url.path)")
}
#endif
}
// Attempt 2: Try in-memory storage
else if let memoryContainer = Self.createInMemoryContainer(schema: schema, logger: logger) {