diff --git a/README.md b/README.md index e1ec321..553f054 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ @@ -67,7 +67,7 @@ In a nutshell, ConfiChat caters to users who value transparent control over thei - **Local Model Support (Ollama and LlamaCpp)**: [Ollama](https://ollama.com) & [LlamaCpp](https://github.com/ggerganov/llama.cpp) both offer a range of lightweight, open-source local models, such as [Llama by Meta](https://ai.meta.com/llama/), [Gemma by Google](https://ai.google.dev/gemma), and [Llava](https://github.com/haotian-liu/LLaVA) for multimodal/image support. These models are designed to run efficiently even on machines with limited resources. -- **OpenAI Integration**: Seamlessly integrates with [OpenAI](https://openai.com) to provide advanced language model capabilities using your [own API key](https://platform.openai.com/docs/quickstart). Please note that while the API does not store conversations like ChatGPT does, OpenAI retains input data for abuse monitoring purposes. You can review their latest [data retention and security policies](https://openai.com/enterprise-privacy/). In particular, check the "How does OpenAI handle data retention and monitoring for API usage?" in their FAQ (https://openai.com/enterprise-privacy/). +- **OpenAI and Anthropic Support**: Seamlessly integrates with [OpenAI](https://openai.com) and [Anthropic](https://anthropic.com) to provide advanced language model capabilities using your [own API key](https://platform.openai.com/docs/quickstart). Please note that while the API does not store conversations like ChatGPT does, OpenAI retains input data for abuse monitoring purposes. You can review their latest [data retention and security policies](https://openai.com/enterprise-privacy/). In particular, check the "How does OpenAI handle data retention and monitoring for API usage?" in their FAQ (https://openai.com/enterprise-privacy/). - **Privacy-Focused**: Privacy is at the core of ConfiChat's development. 
The app is designed to prioritize user confidentiality, with optional chat history encryption ensuring that your data remains secure. diff --git a/confichat/lib/api_anthropic.dart b/confichat/lib/api_anthropic.dart new file mode 100644 index 0000000..cfeb6f6 --- /dev/null +++ b/confichat/lib/api_anthropic.dart @@ -0,0 +1,306 @@ +/* + * Copyright 2024 Rune Berg (http://runeberg.io | https://github.com/1runeberg) + * Licensed under Apache 2.0 (https://www.apache.org/licenses/LICENSE-2.0) + * SPDX-License-Identifier: Apache-2.0 + */ + +import 'dart:async'; +import 'dart:io'; + +import 'package:flutter/foundation.dart'; +import 'package:path_provider/path_provider.dart'; +import 'package:http/http.dart' as http; +import 'dart:convert'; +import 'interfaces.dart'; + +import 'package:confichat/app_data.dart'; + + +class ApiAnthropic extends LlmApi{ + + static String version = '2023-06-01'; + static final ApiAnthropic _instance = ApiAnthropic._internal(); + static ApiAnthropic get instance => _instance; + + factory ApiAnthropic() { + return _instance; + } + ApiAnthropic._internal() : super(AiProvider.anthropic) { + + scheme = 'https'; + host = 'api.anthropic.com'; + port = 443; + path = '/v1'; + + defaultTemperature = 1.0; + defaultProbability = 1.0; + defaultMaxTokens = 1024; + defaultStopSequences = []; + + temperature = 1.0; + probability = 1.0; + maxTokens = 1024; + stopSequences = []; + } + + bool isImageTypeSupported(String extension){ + const allowedExtensions = ['jpeg', 'png', 'gif', 'webp']; + return allowedExtensions.contains(extension.toLowerCase()); + } + + // Implementations + @override + Future loadSettings() async { + final directory = AppData.instance.rootPath.isEmpty ? 
await getApplicationDocumentsDirectory() : Directory(AppData.instance.rootPath); + final filePath ='${directory.path}/${AppData.appStoragePath}/${AppData.appSettingsFile}'; + + if (await File(filePath).exists()) { + final fileContent = await File(filePath).readAsString(); + final Map settings = json.decode(fileContent); + + if (settings.containsKey(AiProvider.anthropic.name)) { + + // Override values in memory from disk + apiKey = settings[AiProvider.anthropic.name]['apikey'] ?? ''; + } + } + } + + @override + Future getModels(List outModels) async { + + // As of this writing, there doesn't seem to be an api endpoint to grab model names + outModels.add(ModelItem('claude-3-5-sonnet-20240620', 'claude-3-5-sonnet-20240620')); + outModels.add(ModelItem('claude-3-opus-20240229', 'claude-3-opus-20240229')); + outModels.add(ModelItem('claude-3-sonnet-20240229', 'claude-3-sonnet-20240229')); + outModels.add(ModelItem('claude-3-haiku-20240307', 'claude-3-haiku-20240307')); + } + + @override + Future getCachedMessagesInModel(List outCachedMessages, String modelId) async { + } + + @override + Future loadModelToMemory(String modelId) async { + return; // no need to preload model with Anthropic online models + } + + @override + Future getModelInfo(ModelInfo outModelInfo, String modelId) async { + // No function for this exists in Anthropic as of this writing + } + + @override + Future deleteModel(String modelId) async { + // todo: allow deletion of tuned models + } + + @override + Future sendPrompt({ + required String modelId, + required List> messages, + bool? getSummary, + Map? documents, + Map? codeFiles, + CallbackPassVoidReturnInt? onStreamRequestSuccess, + CallbackPassIntReturnBool? onStreamCancel, + CallbackPassIntChunkReturnVoid? onStreamChunkReceived, + CallbackPassIntReturnVoid? onStreamComplete, + CallbackPassDynReturnVoid? onStreamRequestError, + CallbackPassIntDynReturnVoid? 
onStreamingError + }) async { + try { + + // Set if this is a summary request + getSummary = getSummary ?? false; + + // Add documents if present + applyDocumentContext(messages: messages, documents: documents, codeFiles: codeFiles ); + + // Filter out empty stop sequences + List filteredStopSequences = stopSequences.where((s) => s.trim().isNotEmpty).toList(); + + // Assemble headers - this sequence seems to matter with Anthropic streaming + Map headers = {'anthropic-version': version}; + headers.addAll(AppData.headerJson); + headers.addAll({'x-api-key': apiKey}); + + // Parse messages for sending to Anthropic + List> apiMessages = []; + + String systemPrompt = ''; + for (var message in messages) { + List> contentList = []; + + // Add the text content + if (message['content'] != null && message['content'].isNotEmpty) { + contentList.add({ + "type": "text", + "text": message['content'], + }); + } + + // Add the images if any + if (message['images'] != null) { + for (var imageFile in message['images']) { + + if(isImageTypeSupported(imageFile['ext'])){ + contentList.add({ + "type": "image", + "source": { + "type": "base64", + "media_type": "image/${imageFile['ext']}", + "data": imageFile['base64'], + } + }); + } + + } + } + + // Check for valid message + if(message.containsKey('role')) { + + // Check for system prompt + if(message['role'] == 'system') { + systemPrompt = message['content']; + } else { + // Add to message history + apiMessages.add({ + "role": message['role'], + "content": contentList, + }); + } + } + } + + // Add summary prompt + if( getSummary ) { + apiMessages.add({ + "role": 'user', + "content": summaryPrompt, + }); + } + + // Assemble request + final request = http.Request('POST', getUri('/messages')) + ..headers.addAll(headers); + + request.body = jsonEncode({ + 'model': modelId, + 'messages': apiMessages, + 'temperature': temperature, + 'top_p': probability, + 'max_tokens': maxTokens, + if (filteredStopSequences.isNotEmpty) 'stop_sequences': 
filteredStopSequences, + if (systemPrompt.isNotEmpty) 'system': systemPrompt, + 'stream': true + }); + + // Send request and await streamed response + final response = await request.send(); + + // Check the status of the response + if (response.statusCode == 200) { + + // Handle callback if any + int indexPayload = 0; + if(onStreamRequestSuccess != null) { indexPayload = onStreamRequestSuccess(); } + + // Listen for json object stream from api + StreamSubscription? streamSub; + streamSub = response.stream + .transform(utf8.decoder) + .transform(const LineSplitter()) // Split by lines + .transform(SseTransformer()) // Transform into SSE events + .listen((chunk) { + + // Check if user requested a cancel + bool cancelRequested = onStreamCancel != null; + if(cancelRequested){ cancelRequested = onStreamCancel(indexPayload); } + if(cancelRequested){ + if(onStreamComplete != null) { onStreamComplete(indexPayload); } + streamSub?.cancel(); + return; + } + + // Handle callback (if any) + if(chunk.isNotEmpty) + { + // Uncomment for testing + //print(chunk); + + // Parse the JSON string + Map jsonMap = jsonDecode(chunk); + + // Extract the first choice + if (jsonMap.containsKey('delta') && jsonMap['delta'].isNotEmpty) { + var delta = jsonMap['delta']; + + // Extract the content + if (delta.containsKey('text')) { + String content = delta['text']; + if (content.isNotEmpty && onStreamChunkReceived != null) { + onStreamChunkReceived(indexPayload, StreamChunk(content)); + } + } + } + + } + + }, onDone: () { + + if(onStreamComplete != null) { onStreamComplete(indexPayload); } + + }, onError: (error) { + + if (kDebugMode) {print('Streamed data request failed with error: $error');} + if(onStreamingError != null) { onStreamingError(indexPayload, error); } + }); + + } else { + if (kDebugMode) {print('Streamed data request failed with status: ${response.statusCode}\n');} + if(onStreamRequestError != null) { onStreamRequestError(response.statusCode); } + } + } catch (e) { + if 
(kDebugMode) { + print('Unable to get chat response: $e\n $responseData'); + } + } + + } + +} + +class SseTransformer extends StreamTransformerBase { + + @override + Stream bind(Stream stream) { + final controller = StreamController(); + final buffer = StringBuffer(); + + stream.listen((line) { + + // Uncomment for troubleshooting + //print(line); + + if (line.startsWith('data: {"type":"content_block_delta')) { // We're only interested with the content deltas + buffer.write(line.substring(6)); // Append line data to buffer, excluding the 'data: ' prefix + } else if (line.isEmpty) { + // Empty line indicates end of an event + if (buffer.isNotEmpty) { + final event = buffer.toString(); + if (event != '[DONE]') { controller.add(event); } + buffer.clear(); + } + } + }, onDone: () { + controller.close(); + }, onError: (error) { + controller.addError(error); + }); + + return controller.stream; + } + +} diff --git a/confichat/lib/api_ollama.dart b/confichat/lib/api_ollama.dart index f0a2e03..6989544 100644 --- a/confichat/lib/api_ollama.dart +++ b/confichat/lib/api_ollama.dart @@ -233,6 +233,29 @@ class ApiOllama extends LlmApi{ // Filter out empty stop sequences List filteredStopSequences = stopSequences.where((s) => s.trim().isNotEmpty).toList(); + // Process messages to extract images + List> processedMessages = messages.map((message) { + // Check for images in the message + if (message['images'] != null) { + // If images exist, extract the base64 values + List base64Images = []; + var images = message['images'] as List>; + + for (var image in images) { + base64Images.add(image['base64'] ?? 
''); + } + + // Create a new message with extracted base64 images + return { + "role": message['role'], + "content": message['content'], + "images": base64Images, // Use only base64 images + }; + } + return message; // Return the message as is if no images + }).toList(); + + // Assemble request final request = http.Request('POST', getUri('/chat')) ..headers.addAll(AppData.headerJson); @@ -249,7 +272,7 @@ class ApiOllama extends LlmApi{ request.body = jsonEncode({ 'model': modelId, 'messages': [ - ...messages, + ...processedMessages, if (getSummary) summaryRequest, ], 'options': { diff --git a/confichat/lib/api_openai.dart b/confichat/lib/api_openai.dart index cf1511b..347c544 100644 --- a/confichat/lib/api_openai.dart +++ b/confichat/lib/api_openai.dart @@ -192,10 +192,10 @@ class ApiChatGPT extends LlmApi{ // Add the images if any if (message['images'] != null) { - for (var imageUrl in message['images']) { + for (var imageFile in message['images']) { contentList.add({ "type": "image_url", - "image_url": {"url": "data:image/jpeg;base64,$imageUrl"}, + "image_url": {"url": "data:image/${imageFile['ext']};base64,${imageFile['base64']}"}, }); } } @@ -302,4 +302,38 @@ class ApiChatGPT extends LlmApi{ } -} // ApiChatGPT +} + +class SseTransformer extends StreamTransformerBase { + + @override + Stream bind(Stream stream) { + final controller = StreamController(); + final buffer = StringBuffer(); + + stream.listen((line) { + + // Uncomment for troubleshooting + // print(line); + + if (line.startsWith('data: ')) { + // Append line data to buffer, excluding the 'data: ' prefix + buffer.write(line.substring(6)); + } else if (line.isEmpty) { + // Empty line indicates end of an event + if (buffer.isNotEmpty) { + final event = buffer.toString(); + if (event != '[DONE]') { controller.add(event); } + buffer.clear(); + } + } + }, onDone: () { + controller.close(); + }, onError: (error) { + controller.addError(error); + }); + + return controller.stream; + } + +} \ No newline at end 
of file diff --git a/confichat/lib/app_data.dart b/confichat/lib/app_data.dart index 9331a75..eead6e4 100644 --- a/confichat/lib/app_data.dart +++ b/confichat/lib/app_data.dart @@ -62,11 +62,14 @@ class AppData { case AiProvider.ollama: api = LlmApiFactory.create(AiProvider.ollama.name); break; + case AiProvider.llamacpp: + api = LlmApiFactory.create(AiProvider.llamacpp.name); + break; case AiProvider.openai: api = LlmApiFactory.create(AiProvider.openai.name); break; - case AiProvider.llamacpp: - api = LlmApiFactory.create(AiProvider.llamacpp.name); + case AiProvider.anthropic: + api = LlmApiFactory.create(AiProvider.anthropic.name); break; default: if (kDebugMode) { print('Unknown AI provider.'); } @@ -93,8 +96,9 @@ class AppData { enum AiProvider { ollama('Ollama', 0), - openai('OpenAI', 1), - llamacpp('LlamaCpp', 2); + llamacpp('LlamaCpp', 1), + openai('OpenAI', 2), + anthropic('Anthropic', 3); final String name; final int id; diff --git a/confichat/lib/factories.dart b/confichat/lib/factories.dart index d03220d..0a7b1a2 100644 --- a/confichat/lib/factories.dart +++ b/confichat/lib/factories.dart @@ -5,20 +5,25 @@ */ +import 'package:confichat/interfaces.dart'; + +import 'package:confichat/api_ollama.dart'; import 'package:confichat/api_llamacpp.dart'; import 'package:confichat/api_openai.dart'; -import 'package:confichat/api_ollama.dart'; -import 'package:confichat/interfaces.dart'; +import 'package:confichat/api_anthropic.dart'; + class LlmApiFactory { static LlmApi create(String apiProvider) { switch (apiProvider.toLowerCase()) { case 'ollama': return ApiOllama(); - case 'openai': - return ApiChatGPT(); case 'llamacpp': return ApiLlamaCpp(); + case 'openai': + return ApiChatGPT(); + case 'anthropic': + return ApiAnthropic(); default: throw Exception('Unsupported API provider: $apiProvider'); } diff --git a/confichat/lib/file_parser.dart b/confichat/lib/file_parser.dart index fec110f..8400c96 100644 --- a/confichat/lib/file_parser.dart +++ 
b/confichat/lib/file_parser.dart @@ -23,6 +23,14 @@ enum ParserFileType { unknown, } + +class ImageFile { + final String base64; + final String ext; + + ImageFile(this.base64, this.ext); +} + class FileParser { static const List imageFormats = [ @@ -65,13 +73,35 @@ class FileParser { return ParserFileType.unknown; } + static String getImageExtension(String filename){ + + final parts = filename.split('.'); + if (parts.length < 2) { + return ''; + } + + final extension = parts.last.toLowerCase(); + if(imageFormats.contains(extension)){ + if(extension == 'jpg' || extension == 'jpeg' ) { + return 'jpeg'; + } + + return extension; + } + + return ''; + } + static Future processImages({ required File file, - required List outImages, + required List outImages, }) async { List imageBytes = file.readAsBytesSync(); - String base64String = base64Encode(imageBytes); - outImages.add(base64String); + ImageFile imageFile = ImageFile(base64Encode(imageBytes), getImageExtension(file.path)); + + if(imageFile.ext.isNotEmpty && imageFile.base64.isNotEmpty){ + outImages.add(imageFile); + } } static Future processTextDocuments({ @@ -127,7 +157,7 @@ class FileParser { static Future processPlatformFiles({ required List files, required BuildContext context, - List? outImages, + List? outImages, Map? outDocuments, Map? outCodeFiles, }) async { @@ -177,7 +207,7 @@ class FileParser { static Future processDroppedFiles({ required DropDoneDetails details, required BuildContext context, - List? outImages, + List? outImages, Map? outDocuments, Map? 
outCodeFiles, }) async { diff --git a/confichat/lib/interfaces.dart b/confichat/lib/interfaces.dart index aac9461..e8a50ac 100644 --- a/confichat/lib/interfaces.dart +++ b/confichat/lib/interfaces.dart @@ -252,33 +252,3 @@ abstract class LlmApi { } // LlmApi - -class SseTransformer extends StreamTransformerBase { - - @override - Stream bind(Stream stream) { - final controller = StreamController(); - final buffer = StringBuffer(); - - stream.listen((line) { - if (line.startsWith('data: ')) { - // Append line data to buffer, excluding the 'data: ' prefix - buffer.write(line.substring(6)); - } else if (line.isEmpty) { - // Empty line indicates end of an event - if (buffer.isNotEmpty) { - final event = buffer.toString(); - if (event != '[DONE]') { controller.add(event); } - buffer.clear(); - } - } - }, onDone: () { - controller.close(); - }, onError: (error) { - controller.addError(error); - }); - - return controller.stream; - } - -} diff --git a/confichat/lib/main.dart b/confichat/lib/main.dart index b5df1cc..494e326 100644 --- a/confichat/lib/main.dart +++ b/confichat/lib/main.dart @@ -77,11 +77,14 @@ class ConfiChat extends StatelessWidget { case 'ollama': selectedProvider = AiProvider.ollama; break; + case 'llamacpp': + selectedProvider = AiProvider.llamacpp; + break; case 'openai': selectedProvider = AiProvider.openai; break; - case 'llamacpp': - selectedProvider = AiProvider.llamacpp; + case 'anthropic': + selectedProvider = AiProvider.anthropic; break; default: selectedProvider = AiProvider.ollama; // Fallback to Ollama if the string doesn't match @@ -119,11 +122,7 @@ class ConfiChat extends StatelessWidget { child: Column( mainAxisAlignment: MainAxisAlignment.center, children: [ - Image.asset( - 'assets/confichat_logo.png', - width: 100, // Set the desired width for the logo - height: 100, // Set the desired height for the logo - ), + Image.asset('assets/confichat_logo.png', width: 100, height: 100,), const SizedBox(height: 20), const 
CircularProgressIndicator(), ], diff --git a/confichat/lib/persistent_storage.dart b/confichat/lib/persistent_storage.dart index ab4f914..29ef933 100644 --- a/confichat/lib/persistent_storage.dart +++ b/confichat/lib/persistent_storage.dart @@ -381,12 +381,11 @@ static String encryptStringIV({ return base64IV; } - static void encryptChatData({ + static void encryptChatData({ required encrypt.IV iv, required String userKey, required List> chatData, }) { - for (var entry in chatData) { // Encrypt content if (entry['content'] != null) { @@ -400,30 +399,69 @@ static String encryptStringIV({ // Encrypt images if they exist if (entry['images'] != null && entry['images'] is List) { + entry['images'] = (entry['images'] as List).map((image) { + if (image is String) { + + // If the image is a base64 string, encrypt it - v0.4.0 and below final encryptedImage = CryptoUtils.encryptString( iv: iv, userKey: userKey, data: image, ); - return encryptedImage; + + return { + 'ext': 'jpeg', // Use a default extension if needed + 'base64': encryptedImage, + }; + + } else if (image is Map) { + + // If the image is a map, encrypt the base64 - v0.5.0 and above + final encryptedBase64 = CryptoUtils.encryptString( + iv: iv, + userKey: userKey, + data: image['base64'] ?? '', + ); + + return { + 'ext': image['ext'] ?? 'jpeg', // Keep the existing extension or default to 'jpeg' + 'base64': encryptedBase64, + }; + + } else if (image is Map) { + + // If the image is a map, encrypt the base64 - failsafe for other platforms + final encryptedBase64 = CryptoUtils.encryptString( + iv: iv, + userKey: userKey, + data: image['base64'] ?? '', + ); + + return { + 'ext': image['ext'] ?? 
'jpeg', // Keep the existing extension or default to 'jpeg' + 'base64': encryptedBase64, + }; + } else { - if (kDebugMode) { print('Warning: Non-string image data encountered'); } + if (kDebugMode) { + print('Warning: Non-string or non-map image data encountered'); + } return null; } - }).whereType().toList(); + }).whereType>().toList(); // Filter out nulls and ensure the output is a List } - } + } } + static void decryptChatData({ required String base64IV, required String userKey, required List> chatData, }) { - for (var entry in chatData) { // Decrypt content if (entry['content'] != null) { @@ -435,48 +473,99 @@ static String encryptStringIV({ entry['content'] = decryptedContent; } - // Decrypt images if they exist - if (entry['images'] != null && entry['images'] is List) { - entry['images'] = (entry['images'] as List).map((encryptedImage) { - if (encryptedImage is String) { - try { - final decryptedImage = CryptoUtils.decryptString( - base64IV: base64IV, - userKey: userKey, - encryptedData: encryptedImage, - ); - return decryptedImage; - } catch (e) { - if (kDebugMode) { print('Error decrypting image: $e'); } - return null; + // Decrypt images if they exist + if (entry['images'] != null && entry['images'] is List) { + entry['images'] = (entry['images'] as List).map((encryptedImage) { + + if (encryptedImage is String) { + // If the image is a base64 string - v0.4.0 and below + try { + final decryptedImage = CryptoUtils.decryptString( + base64IV: base64IV, + userKey: userKey, + encryptedData: encryptedImage, + ); + return { + 'ext': 'jpeg', // Default extension; modify if needed + 'base64': decryptedImage, + }; + } catch (e) { + if (kDebugMode) { + print('Error decrypting image: $e'); + } + return null; // Return null for failed decryptions + } + + } else if (encryptedImage is Map) { + + // If the image is a map, decrypt the base64 value - v0.5.0 and above + try { + final decryptedBase64 = CryptoUtils.decryptString( + base64IV: base64IV, + userKey: userKey, + 
encryptedData: encryptedImage['base64'] ?? '', + ); + return { + 'ext': encryptedImage['ext'] ?? 'jpeg', // Retain the existing extension + 'base64': decryptedBase64, + }; + } catch (e) { + if (kDebugMode) { + print('Error decrypting image map: $e'); + } + return null; // Return null for failed decryptions + } + } else if (encryptedImage is Map) { + + // If the image is a map, decrypt the base64 value - failsafe for other platforms + try { + final decryptedBase64 = CryptoUtils.decryptString( + base64IV: base64IV, + userKey: userKey, + encryptedData: encryptedImage['base64'] ?? '', + ); + return { + 'ext': encryptedImage['ext'] ?? 'jpeg', // Retain the existing extension + 'base64': decryptedBase64, + }; + } catch (e) { + if (kDebugMode) { + print('Error decrypting image map: $e'); + } + return null; // Return null for failed decryptions + } + + } else { + if (kDebugMode) { + print('Warning: Non-string or non-map encrypted image data encountered'); + } + return null; // Return null for non-string and non-map values } - } else { - if (kDebugMode) { print('Warning: Non-string encrypted image data encountered'); } - return null; - } - }).whereType().toList(); - } + }).whereType>().toList(); // Filter out null values and ensure output is List + } } } + static void decryptToChatData({ required String base64IV, required String userKey, required dynamic jsonData, required List> chatData, }) { - final List> encryptedData = List>.from(jsonDecode(jsonData)); - if(encryptedData.isEmpty) { return; } + if (encryptedData.isEmpty) { + return; + } chatData.clear(); for (var entry in encryptedData) { - String decryptedContent = ''; + // Decrypt content if (entry['content'] != null) { - decryptedContent = CryptoUtils.decryptString( + decryptedContent = CryptoUtils.decryptString( base64IV: base64IV, userKey: userKey, encryptedData: entry['content']!, @@ -484,40 +573,56 @@ static String encryptStringIV({ } // Decrypt images if they exist - List decryptedImages = []; - if 
(entry['images'] != null && entry['images'] is List) { - decryptedImages = (entry['images'] as List).map((encryptedImage) { - if (encryptedImage is String) { - try { - return CryptoUtils.decryptString( + List> decryptedImages = []; + if (entry['images'] != null) { + var images = entry['images'] as List; + + for (var item in images) { + if (item is String) { + // Decrypting string images + try { + decryptedImages.add({ + 'ext': 'jpeg', // Default extension; adjust as necessary + 'base64': CryptoUtils.decryptString( base64IV: base64IV, userKey: userKey, - encryptedData: encryptedImage, - ); - } catch (e) { - if (kDebugMode) { - print('Error decrypting image: $e'); - } - return null; // Return null to filter out failed decryptions + encryptedData: item, + ), + }); + } catch (e) { + if (kDebugMode) { + print('Error decrypting image: $e'); } - } else { + } + } else if (item is Map) { + // Decrypting map images + try { + decryptedImages.add({ + 'ext': item['ext'] ?? 'jpeg', // Get the ext if available + 'base64': CryptoUtils.decryptString( + base64IV: base64IV, + userKey: userKey, + encryptedData: item['base64'] ?? '', + ), + }); + } catch (e) { if (kDebugMode) { - print('Warning: Non-string encrypted image data encountered'); + print('Error decrypting map image: $e'); } - return null; // Return null for non-string values } - }).whereType().toList(); // Filter out null values + } } + } // Add entry to chat data - chatData.add( { + chatData.add({ "role": entry['role'] ?? '', "content": decryptedContent, - "images": decryptedImages + "images": decryptedImages.isNotEmpty ? 
decryptedImages : null, // Only include images if there are any }); - } } + } diff --git a/confichat/lib/ui_anthropic_options.dart b/confichat/lib/ui_anthropic_options.dart new file mode 100644 index 0000000..4bc03de --- /dev/null +++ b/confichat/lib/ui_anthropic_options.dart @@ -0,0 +1,185 @@ +/* + * Copyright 2024 Rune Berg (http://runeberg.io | https://github.com/1runeberg) + * Licensed under Apache 2.0 (https://www.apache.org/licenses/LICENSE-2.0) + * SPDX-License-Identifier: Apache-2.0 + */ + +import 'dart:io'; +import 'dart:convert'; +import 'package:flutter/material.dart'; +import 'package:confichat/ui_widgets.dart'; +import 'package:path_provider/path_provider.dart'; + +import 'package:confichat/app_data.dart'; + + +class AnthropicOptions extends StatefulWidget { + final AppData appData; + + const AnthropicOptions({super.key, required this.appData}); + + @override + AnthropicOptionsState createState() => AnthropicOptionsState(); +} + +class AnthropicOptionsState extends State { + final TextEditingController _apiKeyController = TextEditingController(); + final FocusNode _focusNode = FocusNode(); + + @override + void initState() { + super.initState(); + _loadSettings(); + WidgetsBinding.instance.addPostFrameCallback((_) { + _focusNode.requestFocus(); + }); + } + + @override + void dispose() { + _apiKeyController.dispose(); + _focusNode.dispose(); + super.dispose(); + } + + Future _loadSettings() async { + final directory = AppData.instance.rootPath.isEmpty ? await getApplicationDocumentsDirectory() : Directory(AppData.instance.rootPath); + final filePath ='${directory.path}/${AppData.appStoragePath}/${AppData.appSettingsFile}'; + + if (await File(filePath).exists()) { + final fileContent = await File(filePath).readAsString(); + final Map settings = json.decode(fileContent); + + if (settings.containsKey(AiProvider.anthropic.name)) { + + // Set the form text + _apiKeyController.text = settings[AiProvider.anthropic.name]['apikey'] ?? 
''; + + if(widget.appData.api.aiProvider.name == AiProvider.anthropic.name){ _applyValues(); } + + } else { + _useDefaultSettings(); + } + } else { + _useDefaultSettings(); + } + } + + void _useDefaultSettings() { + //_apiKeyController.text = ''; + _applyValues(); + } + + void _applyValues() { + if(widget.appData.api.aiProvider.name == AiProvider.anthropic.name) { + AppData.instance.api.apiKey = _apiKeyController.text; } + } + + Future _saveSettings() async { + // Set file path + final directory = AppData.instance.rootPath.isEmpty ? await getApplicationDocumentsDirectory() : Directory(AppData.instance.rootPath); + final filePath = '${directory.path}/${AppData.appStoragePath}/${AppData.appSettingsFile}'; + + // Set new value + final newSetting = { + 'apikey': _apiKeyController.text, + }; + + // Save to disk + Map settings; + final file = File(filePath); + + if (await file.exists()) { + // If the file exists, read the content and parse it + final content = await file.readAsString(); + settings = json.decode(content) as Map; + + // Check if the object name exists, and update it + if (settings.containsKey(AiProvider.anthropic.name)) { + settings[AiProvider.anthropic.name] = newSetting; + } else { + settings[AiProvider.anthropic.name] = newSetting; + } + } else { + settings = { AiProvider.anthropic.name: newSetting }; + } + + // Update in-memory values + _applyValues(); + + // Save the updated settings to disk + await file.create(recursive: true); + await file.writeAsString(const JsonEncoder.withIndent(' ').convert(settings)); + + // Reset model values + if(widget.appData.api.aiProvider.name == AiProvider.anthropic.name) { + AppData.instance.callbackSwitchProvider(AiProvider.anthropic); + } + + // Close window + if (mounted) { + Navigator.of(context).pop(); + } + } + + + @override + Widget build(BuildContext context) { + return Dialog( + child: Padding( + padding: const EdgeInsets.all(16.0), + child: ConstrainedBox( + constraints: const BoxConstraints(maxWidth: 400), + 
child: Column( + mainAxisSize: MainAxisSize.min, + mainAxisAlignment: MainAxisAlignment.center, + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + + // Window title + DialogTitle(title: '${AiProvider.anthropic.name} Options'), + const SizedBox(height: 24), + + TextField( + controller: _apiKeyController, + decoration: InputDecoration( + labelText: 'API Key', + labelStyle: Theme.of(context).textTheme.labelSmall, + border: const UnderlineInputBorder(), + ), + ), + + const SizedBox(height: 16), + Align( + alignment: Alignment.bottomRight, + child: Row( + mainAxisSize: MainAxisSize.min, + children: [ + + ElevatedButton( + onPressed: () async { + await _saveSettings(); + }, + child: const Text('Save'), + ), + + const SizedBox(width: 8), + ElevatedButton( + onPressed: () { + Navigator.of(context).pop(); + }, + focusNode: _focusNode, + child: const Text('Cancel'), + ), + + ], + ), + ), + ], + ), + ), + ), + ); + } + +} diff --git a/confichat/lib/ui_canvass.dart b/confichat/lib/ui_canvass.dart index 6c59ba6..a0a2b2f 100644 --- a/confichat/lib/ui_canvass.dart +++ b/confichat/lib/ui_canvass.dart @@ -51,7 +51,7 @@ class CanvassState extends State { Map> chatDocuments = {}; Map> chatCodeFiles = {}; - List base64Images = []; + List base64Images = []; Map documents = {}; Map codeFiles = {}; Map processingData = {}; @@ -172,11 +172,12 @@ class CanvassState extends State { fnCancelProcessing: isProcessing ? _cancelProcessing : null, indexProcessing: (isProcessing && (processingData[currentIndex] != null && processingData[currentIndex]!)) ? currentIndex : null, textData: chatData[currentIndex]['role'] == 'system' ? "!system_prompt_ignore" : chatData[currentIndex]['content'], - images:chatData[currentIndex]['images'] != null - ? (chatData[currentIndex]['images'] as List) - .map((item) => item as String) - .toList() - : null, + images: chatData[currentIndex]['images'] != null + ? 
(chatData[currentIndex]['images'] as List>) + .map((item) => item['base64'] as String) + .toList() + : null, + documents: chatDocuments.containsKey(currentIndex) ? chatDocuments[currentIndex] : null, codeFiles: chatCodeFiles.containsKey(currentIndex) ? chatCodeFiles[currentIndex] : null, ); @@ -208,13 +209,13 @@ class CanvassState extends State { context: context, builder: (BuildContext context) { return Dialog( - child: ImagePreview(base64Image: image), + child: ImagePreview(base64Image: image.base64), ); }, ); }, child: Image.memory( - base64Decode(image), + base64Decode(image.base64), height: 50, width: 50, fit: BoxFit.cover, @@ -610,18 +611,44 @@ class CanvassState extends State { } else { PersistentStorage.setAppData(jsonData); - final messageHistory = jsonData['messages']; + final messageHistory = jsonDecode(jsonData['messages']); - if(messageHistory != null && messageHistory.isNotEmpty) - { + if (messageHistory != null && messageHistory.isNotEmpty) { setState(() { - chatData = List>.from(jsonDecode(messageHistory)); - }); - } - } + chatData = List>.from(messageHistory.map((message) { + // Handle images separately + if (message['images'] != null) { + var images = message['images'] as List; + + // Prepare a list to hold parsed images + List> parsedImages = []; + + for (var item in images) { + if (item is String) { + // If the item is a String, assume it's a base64 and set the default ext - v0.4.0 and below + parsedImages.add({ + 'ext': 'jpeg', // Default extension from v0.4.0 + 'base64': item, + }); + } else if (item is Map) { + // If the item is a Map, extract base64 = v0.5.0 and above + parsedImages.add({ + 'ext': item['ext'] ?? 'jpeg', // Get the ext if available, otherwise default to 'jpeg' (0.4.0) + 'base64': item['base64'] ?? '', // Get the base64 value + }); + } + } + + // Assign the parsed images back to the message + message['images'] = parsedImages.isNotEmpty ? 
parsedImages : null; + } - }, + return message; // Return the modified message + })); + }); + }}} ), + ElevatedButton( child: const Text('Cancel'), onPressed: () { @@ -848,7 +875,12 @@ class CanvassState extends State { chatData.add({ "role": "user", "content": promptText, - "images": base64Images.isNotEmpty ? List.from(base64Images) : null + "images": base64Images.isNotEmpty + ? base64Images.map((image) => { + "ext": image.ext, + "base64": image.base64, + }).toList() + : null }); // Add any document or code file in to the chat data diff --git a/confichat/lib/ui_sidebar.dart b/confichat/lib/ui_sidebar.dart index 06751bd..16648e3 100644 --- a/confichat/lib/ui_sidebar.dart +++ b/confichat/lib/ui_sidebar.dart @@ -4,18 +4,20 @@ * SPDX-License-Identifier: Apache-2.0 */ -import 'package:confichat/ui_llamacpp_options.dart'; -import 'package:confichat/ui_terms_and_conditions.dart'; import 'package:flutter/material.dart'; - import 'dart:io'; import 'package:confichat/app_data.dart'; import 'package:confichat/chat_notifiers.dart'; import 'package:confichat/persistent_storage.dart'; import 'package:confichat/ui_app_settings.dart'; -import 'package:confichat/ui_openai_options.dart'; + import 'package:confichat/ui_ollama_options.dart'; +import 'package:confichat/ui_llamacpp_options.dart'; +import 'package:confichat/ui_openai_options.dart'; +import 'package:confichat/ui_anthropic_options.dart'; +import 'package:confichat/ui_terms_and_conditions.dart'; + import 'package:confichat/ui_widgets.dart'; import 'package:path_provider/path_provider.dart'; import 'package:provider/provider.dart'; @@ -198,7 +200,21 @@ class SidebarState extends State { }, ), - // (2.2.2) OpenAI options + // (2.2.2) LlamaCpp options + ListTile( + title: Text(AiProvider.llamacpp.name), + onTap: () { + showDialog( + context: context, + barrierDismissible: false, + builder: (BuildContext context) { + return LlamaCppOptions(appData: widget.appData); + }, + ); + }, + ), + + // (2.2.3) OpenAI options ListTile( 
title: Text(AiProvider.openai.name), onTap: () { @@ -212,21 +228,21 @@ class SidebarState extends State { }, ), - // (2.2.3) LlamaCpp options + // (2.2.4) Anthropic options ListTile( - title: Text(AiProvider.llamacpp.name), + title: Text(AiProvider.anthropic.name), onTap: () { showDialog( context: context, barrierDismissible: false, builder: (BuildContext context) { - return LlamaCppOptions(appData: widget.appData); + return AnthropicOptions(appData: widget.appData); }, ); }, ), - // (2.2.4) App settings + // (2.2.5) App settings ListTile( title: const Text('Application settings'), onTap: () { diff --git a/confichat/lib/ui_widgets.dart b/confichat/lib/ui_widgets.dart index be910d1..1f1f20d 100644 --- a/confichat/lib/ui_widgets.dart +++ b/confichat/lib/ui_widgets.dart @@ -6,12 +6,12 @@ import 'dart:math'; +import 'package:code_highlight_view/code_highlight_view.dart'; +import 'package:code_highlight_view/themes/github.dart'; import 'package:confichat/persistent_storage.dart'; import 'package:flutter/material.dart'; import 'package:flutter/services.dart'; import 'package:flutter_markdown/flutter_markdown.dart'; -import 'package:flutter_highlight/flutter_highlight.dart'; -import 'package:flutter_highlight/themes/github.dart'; import 'package:confichat/app_data.dart'; import 'package:markdown/markdown.dart' as md; @@ -297,7 +297,7 @@ class CodePreviewBuilder extends MarkdownElementBuilder { border: Border.all(color: Colors.grey), borderRadius: BorderRadius.circular(3), ), - child: HighlightView( + child: CodeHighlightView( element.textContent, padding: const EdgeInsets.all(10), language: getCodeLanguage(element), diff --git a/confichat/pubspec.yaml b/confichat/pubspec.yaml index 16e1a37..d528e69 100644 --- a/confichat/pubspec.yaml +++ b/confichat/pubspec.yaml @@ -12,7 +12,6 @@ dependencies: file_picker: ^8.0.0+1 flutter: sdk: flutter - flutter_highlight: ^0.7.0 flutter_markdown: ^0.7.3+1 http: ^1.2.2 intl: ^0.19.0 @@ -23,6 +22,7 @@ dependencies: pdfrx: ^1.0.82 encrypt: 
^5.0.3 crypto: ^3.0.3 + code_highlight_view: ^0.1.1 dev_dependencies: flutter_test: diff --git a/docs/quickstart.md b/docs/quickstart.md index 69c8582..9c2b7e5 100644 --- a/docs/quickstart.md +++ b/docs/quickstart.md @@ -10,7 +10,7 @@ Get up and running with **ConfiChat** by following this guide. Whether you're us - [3. Set Up ConfiChat](#3-set-up-confichat) - [Additional Resources](#additional-resources) 2. [Getting started with Online Models](#using-online-models) - - [1. Get Your OpenAI API Key](#1-get-your-openai-api-key) + - [1. Get Your OpenAI or Anthropic API Key](#1-get-your-api-key) - [2. Set Up ConfiChat](#2-set-up-confichat) - [3. Configure ConfiChat with Your API Key](#3-configure-confichat-with-your-api-key) - [Additional Resources](#additional-resources-1) @@ -18,7 +18,7 @@ Get up and running with **ConfiChat** by following this guide. Whether you're us - [1. Install Ollama](#1-install-ollama) - [2. Download a Model](#2-download-a-model) - [3. Set Up ConfiChat](#3-set-up-confichat) - - [4. Get Your OpenAI API Key](#4-get-your-openai-api-key) + - [4. Get Your OpenAI or Anthropic API Key](#4-get-your-api-key) - [5. Configure ConfiChat with Your API Key](#5-configure-confichat-with-your-api-key) - [Additional Resources](#additional-resources-2) 4. [Using ConfiChat with LlamaCpp](#using-confichat-with-llamacpp) @@ -78,13 +78,13 @@ For more detailed instructions and troubleshooting, please visit the [Ollama doc Get started with **ConfiChat** and **OpenAI** by following these simple steps. You'll set up your OpenAI API key, download ConfiChat, and configure it to use OpenAI. -### 1. Get Your OpenAI API Key +### 1. Get Your API Key To use OpenAI with ConfiChat, you first need to obtain an API key: -1. Go to the [OpenAI API](https://platform.openai.com/account/api-keys) page. -2. Log in with your OpenAI account. -3. Click on "Create new secret key" and copy the generated API key. +1. 
Go to the [OpenAI API](https://platform.openai.com/account/api-keys) or [Anthropic API](https://console.anthropic.com/settings/keys) page. +2. Log in with your account. +3. Follow the on-screen instructions to create a new API key, then copy it. Keep your API key secure and do not share it publicly. @@ -98,15 +98,15 @@ Once ConfiChat is running: -1. Navigate to **Settings > OpenAI**. -2. Paste your OpenAI API key into the provided form. +1. Navigate to **Settings > OpenAI** or **Settings > Anthropic**. +2. Paste your API key into the provided form. 3. Click "Save" to apply the changes. ConfiChat is now configured to use OpenAI for its language model capabilities! ### Additional Resources -For more detailed instructions and troubleshooting, please visit the [OpenAI documentation](https://platform.openai.com/docs). +For more detailed instructions and troubleshooting, please visit the [OpenAI documentation](https://platform.openai.com/docs) or the [Anthropic documentation](https://docs.anthropic.com/en/docs/welcome). --- @@ -128,9 +128,9 @@ Follow the instructions in the [Download a Model](#2-download-a-model) section a Note: There may be a warning during first run as the binaries are unsigned. -### 4. Get Your OpenAI API Key +### 4. Get Your API Key -Follow the instructions in the [Get Your OpenAI API Key](#1-get-your-openai-api-key) section above. +Follow the instructions in the [Get Your API Key](#1-get-your-api-key) section above. ### 5. Configure ConfiChat with Your API Key @@ -138,7 +138,7 @@ Follow the instructions in the [Configure ConfiChat with Your API Key](#3-config ### Additional Resources -For more detailed instructions and troubleshooting, please visit the [Ollama documentation](https://ollama.com/docs), the [OpenAI documentation](https://platform.openai.com/docs), and the [ConfiChat repository](https://github.com/your-repository/ConfiChat). 
+For more detailed instructions and troubleshooting, please visit the [Ollama documentation](https://ollama.com/docs), the [OpenAI documentation](https://platform.openai.com/docs), the [Anthropic documentation](https://docs.anthropic.com/en/docs/welcome), and the [ConfiChat repository](https://github.com/your-repository/ConfiChat). ## Using ConfiChat with LlamaCpp