From d6663431211cb40000074a4b4271fe663959f6ae Mon Sep 17 00:00:00 2001 From: Alexey Haidamaka Date: Tue, 26 Aug 2025 16:32:43 +0200 Subject: [PATCH] added mistral-medium-latest https://docs.mistral.ai/getting-started/models/models_overview/#api-versioning Medium offers a reasonable balance between Small and Large in output and speed. Especially now, when Large is frequently over capacity. This is my go-to model for Mistral. --- VoiceInk/Services/AIService.swift | 1 + 1 file changed, 1 insertion(+) diff --git a/VoiceInk/Services/AIService.swift b/VoiceInk/Services/AIService.swift index b6f978f..b6b6799 100644 --- a/VoiceInk/Services/AIService.swift +++ b/VoiceInk/Services/AIService.swift @@ -114,6 +114,7 @@ enum AIProvider: String, CaseIterable { case .mistral: return [ "mistral-large-latest", + "mistral-medium-latest", "mistral-small-latest", "mistral-saba-latest" ]