diff --git a/README.md b/README.md
index e1ec321..553f054 100644
--- a/README.md
+++ b/README.md
@@ -17,7 +17,7 @@
- Offline providers like Ollama and LlamaCpp provide privacy by operating on your local machine or network without cloud services.
- - Online providers like OpenAI offer cutting-edge models via APIs, which have different privacy policies than their chat services, giving you greater control over your data.
+ - Online providers like OpenAI and Anthropic offer cutting-edge models via APIs, which have different privacy policies than their chat services, giving you greater control over your data.
@@ -67,7 +67,7 @@ In a nutshell, ConfiChat caters to users who value transparent control over thei
- **Local Model Support (Ollama and LlamaCpp)**: [Ollama](https://ollama.com) & [LlamaCpp](https://github.com/ggerganov/llama.cpp) both offer a range of lightweight, open-source local models, such as [Llama by Meta](https://ai.meta.com/llama/), [Gemma by Google](https://ai.google.dev/gemma), and [Llava](https://github.com/haotian-liu/LLaVA) for multimodal/image support. These models are designed to run efficiently even on machines with limited resources.
-- **OpenAI Integration**: Seamlessly integrates with [OpenAI](https://openai.com) to provide advanced language model capabilities using your [own API key](https://platform.openai.com/docs/quickstart). Please note that while the API does not store conversations like ChatGPT does, OpenAI retains input data for abuse monitoring purposes. You can review their latest [data retention and security policies](https://openai.com/enterprise-privacy/). In particular, check the "How does OpenAI handle data retention and monitoring for API usage?" in their FAQ (https://openai.com/enterprise-privacy/).
+- **OpenAI and Anthropic Support**: Seamlessly integrates with [OpenAI](https://openai.com) and [Anthropic](https://anthropic.com) to provide advanced language model capabilities using your own API key (see the [OpenAI quickstart](https://platform.openai.com/docs/quickstart)). Please note that while these APIs do not store conversations the way consumer chat services like ChatGPT do, providers may retain input data for abuse monitoring. You can review OpenAI's latest [data retention and security policies](https://openai.com/enterprise-privacy/) — in particular, see "How does OpenAI handle data retention and monitoring for API usage?" in their FAQ — as well as Anthropic's [privacy policy](https://www.anthropic.com/legal/privacy).
- **Privacy-Focused**: Privacy is at the core of ConfiChat's development. The app is designed to prioritize user confidentiality, with optional chat history encryption ensuring that your data remains secure.
diff --git a/confichat/lib/api_anthropic.dart b/confichat/lib/api_anthropic.dart
new file mode 100644
index 0000000..cfeb6f6
--- /dev/null
+++ b/confichat/lib/api_anthropic.dart
@@ -0,0 +1,306 @@
+/*
+ * Copyright 2024 Rune Berg (http://runeberg.io | https://github.com/1runeberg)
+ * Licensed under Apache 2.0 (https://www.apache.org/licenses/LICENSE-2.0)
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+import 'dart:async';
+import 'dart:io';
+
+import 'package:flutter/foundation.dart';
+import 'package:path_provider/path_provider.dart';
+import 'package:http/http.dart' as http;
+import 'dart:convert';
+import 'interfaces.dart';
+
+import 'package:confichat/app_data.dart';
+
+
+/// Singleton API client for Anthropic's hosted models.
+///
+/// Extends [LlmApi], which supplies the connection fields (`scheme`, `host`,
+/// `port`, `path`) and the sampling fields (`temperature`, `probability`,
+/// `maxTokens`, `stopSequences`) assigned in the private constructor below.
+class ApiAnthropic extends LlmApi{
+
+  // Value for the `anthropic-version` request header.
+  // NOTE(review): this never changes at runtime — consider `static const`.
+  static String version = '2023-06-01';
+  // Eagerly-created shared instance; both the factory constructor and the
+  // `instance` getter funnel to this single object.
+  static final ApiAnthropic _instance = ApiAnthropic._internal();
+  static ApiAnthropic get instance => _instance;
+
+  /// Always returns the shared singleton instance.
+  factory ApiAnthropic() {
+    return _instance;
+  }
+  // Private constructor: registers the provider with the base class and
+  // seeds both the default and the active generation parameters.
+  ApiAnthropic._internal() : super(AiProvider.anthropic) {
+
+    scheme = 'https';
+    host = 'api.anthropic.com';
+    port = 443;
+    path = '/v1';
+
+    // Provider defaults, kept separate from the live values so the UI can
+    // reset back to them.
+    defaultTemperature = 1.0;
+    defaultProbability = 1.0;
+    defaultMaxTokens = 1024;
+    defaultStopSequences = [];
+
+    // Active values start out identical to the defaults.
+    temperature = 1.0;
+    probability = 1.0;
+    maxTokens = 1024;
+    stopSequences = [];
+  }
+
+  /// Whether [extension] (case-insensitive, without the leading dot) names an
+  /// image format accepted by the Anthropic API.
+  ///
+  /// NOTE(review): files with a `.jpg` extension are rejected here even though
+  /// Anthropic accepts `image/jpeg` — confirm whether callers normalize
+  /// `jpg` → `jpeg` before this check, or add `'jpg'` with a media-type remap.
+  bool isImageTypeSupported(String extension){
+    const allowedExtensions = ['jpeg', 'png', 'gif', 'webp'];
+    return allowedExtensions.contains(extension.toLowerCase());
+  }
+
+ // Implementations
+ @override
+ Future loadSettings() async {
+ final directory = AppData.instance.rootPath.isEmpty ? await getApplicationDocumentsDirectory() : Directory(AppData.instance.rootPath);
+ final filePath ='${directory.path}/${AppData.appStoragePath}/${AppData.appSettingsFile}';
+
+ if (await File(filePath).exists()) {
+ final fileContent = await File(filePath).readAsString();
+ final Map settings = json.decode(fileContent);
+
+ if (settings.containsKey(AiProvider.anthropic.name)) {
+
+ // Override values in memory from disk
+ apiKey = settings[AiProvider.anthropic.name]['apikey'] ?? '';
+ }
+ }
+ }
+
+ @override
+ Future getModels(List outModels) async {
+
+ // As of this writing, there doesn't seem to be an api endpoint to grab model names
+ outModels.add(ModelItem('claude-3-5-sonnet-20240620', 'claude-3-5-sonnet-20240620'));
+ outModels.add(ModelItem('claude-3-opus-20240229', 'claude-3-opus-20240229'));
+ outModels.add(ModelItem('claude-3-sonnet-20240229', 'claude-3-sonnet-20240229'));
+ outModels.add(ModelItem('claude-3-haiku-20240307', 'claude-3-haiku-20240307'));
+ }
+
+ @override
+ Future getCachedMessagesInModel(List outCachedMessages, String modelId) async {
+ }
+
+ @override
+ Future loadModelToMemory(String modelId) async {
+ return; // no need to preload model with chatgpt online models
+ }
+
+ @override
+ Future getModelInfo(ModelInfo outModelInfo, String modelId) async {
+ // No function for this exists in Anthropic as of this writing
+ }
+
+ @override
+ Future deleteModel(String modelId) async {
+ // todo: allow deletion of tuned models
+ }
+
+ @override
+ Future sendPrompt({
+ required String modelId,
+ required List