diff --git a/.github/workflows/publish_release.yml b/.github/workflows/publish_release.yml
index 1ca8bf8..4099065 100644
--- a/.github/workflows/publish_release.yml
+++ b/.github/workflows/publish_release.yml
@@ -9,6 +9,9 @@ on:
release_name:
description: 'Release name'
required: true
+ release_notes:
+ description: 'Release Notes'
+ required: true
jobs:
build-and-release:
@@ -21,7 +24,10 @@ jobs:
env:
RELEASE_NOTES: |
**Notice:** By downloading and using the pre-built binaries, you agree to the app's [Terms and Conditions](https://github.com/1runeberg/confichat/blob/main/confichat/assets/TERMS_AND_CONDITIONS.md). Acceptance of these terms is implied upon download. The full Terms and Conditions are also available within the app under (Hamburger menu) > "Legal" > "Terms and Conditions".
-
+
+
+ ${{ github.event.inputs.release_notes }}
+
steps:
- uses: actions/checkout@v4.1.7
diff --git a/README.md b/README.md
index f4d1b20..fcc706d 100644
--- a/README.md
+++ b/README.md
@@ -55,7 +55,7 @@ In a nutshell, ConfiChat caters to users who value transparent control over thei
### 🛠️ 5. Compiling your own build
-For those who prefer to compile ConfiChat themselves, or for macOS and iOS users, we provide detailed instructions in the [Compiling on your Own](docs/compiling.md) section.
+For those who prefer to compile ConfiChat themselves, or for macOS and iOS users, we provide detailed instructions in the [Compiling on your own](docs/compiling.md) section.
### 🤝 6. Contributing
diff --git a/confichat/.idea/workspace.xml b/confichat/.idea/workspace.xml
index ea5d72d..638e1b6 100644
--- a/confichat/.idea/workspace.xml
+++ b/confichat/.idea/workspace.xml
@@ -10,10 +10,7 @@
-
-
-
-
+
@@ -55,21 +52,21 @@
- {
+ "keyToString": {
+ "Flutter.main.dart.executor": "Run",
+ "RunOnceActivity.ShowReadmeOnStart": "true",
+ "RunOnceActivity.cidr.known.project.marker": "true",
+ "RunOnceActivity.readMode.enableVisualFormatting": "true",
+ "cf.first.check.clang-format": "false",
+ "cidr.known.project.marker": "true",
+ "dart.analysis.tool.window.visible": "false",
+ "io.flutter.reload.alreadyRun": "true",
+ "kotlin-language-version-configured": "true",
+ "last_opened_file_path": "E:/GitHub/confichat/confichat",
+ "show.migrate.to.gradle.popup": "false"
}
-}]]>
+}
diff --git a/confichat/lib/api_llamacpp.dart b/confichat/lib/api_llamacpp.dart
new file mode 100644
index 0000000..78f4759
--- /dev/null
+++ b/confichat/lib/api_llamacpp.dart
@@ -0,0 +1,368 @@
+/*
+ * Copyright 2024 Rune Berg (http://runeberg.io | https://github.com/1runeberg)
+ * Licensed under Apache 2.0 (https://www.apache.org/licenses/LICENSE-2.0)
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+import 'dart:async';
+import 'dart:io';
+
+import 'package:flutter/foundation.dart';
+import 'package:path_provider/path_provider.dart';
+import 'package:http/http.dart' as http;
+import 'dart:convert';
+import 'interfaces.dart';
+import 'package:intl/intl.dart';
+
+import 'package:confichat/app_data.dart';
+
+
+class ApiLlamaCpp extends LlmApi{
+
+ static final ApiLlamaCpp _instance = ApiLlamaCpp._internal(); // Eagerly-created singleton instance
+ static ApiLlamaCpp get instance => _instance; // Global accessor for the singleton
+
+ factory ApiLlamaCpp() { // Factory constructor always returns the shared singleton
+ return _instance;
+ }
+
+ ApiLlamaCpp._internal() : super(AiProvider.llamacpp) { // Private ctor: registers this API as the llama.cpp provider
+
+ scheme = 'http'; // Default endpoint: http://localhost:8080/v1 (llama-server default port)
+ host = 'localhost';
+ port = 8080;
+ path = '/v1'; // OpenAI-compatible API base path
+ apiKey = ''; // Empty by default; replaced by 'no-key' placeholder at request time
+
+ defaultTemperature = 1.0; // Default sampling parameters
+ defaultProbability = 1.0;
+ defaultMaxTokens = 4096;
+ defaultStopSequences = [];
+
+ temperature = 1.0; // Active sampling parameters, initialized to the defaults above
+ probability = 1.0;
+ maxTokens = 4096;
+ stopSequences = [];
+ }
+
+ // Implementations
+ @override
+ Future loadSettings() async { // Loads llama.cpp connection settings from the app settings file, if present
+ final directory = AppData.instance.rootPath.isEmpty ? await getApplicationDocumentsDirectory() : Directory(AppData.instance.rootPath); // Prefer the configured root path; fall back to the platform documents dir
+ final filePath ='${directory.path}/${AppData.appStoragePath}/${AppData.appSettingsFile}';
+
+ if (await File(filePath).exists()) { // Missing settings file: keep the constructor defaults
+ final fileContent = await File(filePath).readAsString();
+ final Map settings = json.decode(fileContent);
+
+ if (settings.containsKey(AiProvider.llamacpp.name)) {
+
+ // Override values in memory from disk
+ scheme = settings[AiProvider.llamacpp.name]['scheme'] ?? 'http';
+ host = settings[AiProvider.llamacpp.name]['host'] ?? 'localhost';
+ port = settings[AiProvider.llamacpp.name]['port'] ?? 8080;
+ path = settings[AiProvider.llamacpp.name]['path'] ?? '/v1';
+ apiKey = settings[AiProvider.llamacpp.name]['apikey'] ?? '';
+ }
+ }
+ }
+
+ @override
+ Future getModels(List outModels) async { // Populates outModels from the server's /models endpoint
+
+ try {
+
+ // Add authorization header
+ final Map headers = {'Authorization': 'Bearer $apiKey'};
+ if(apiKey.trim().isEmpty){
+ headers['Authorization'] = 'Bearer no-key'; // Fall back to a placeholder bearer token when no API key is configured
+ }
+
+ // Retrieve active models for provider
+ await getData(url: getUri('/models'), requestHeaders: headers); // Result lands in this instance's responseData
+
+ // Decode response
+ final Map jsonData = jsonDecode(responseData);
+ final List modelsJson = jsonData['data']; // OpenAI-style response: models live under the 'data' key
+
+ // Parse to ModelItem
+ for (var json in modelsJson) {
+ final String id = json['id']; // Model id doubles as the display name
+ outModels.add(ModelItem(id, id));
+ }
+
+ } catch (e) {
+ // Log any failure (network error or malformed JSON) in debug builds; outModels is left unchanged
+ if (kDebugMode) { print('Unable to retrieve models ($host): $e\n $responseData'); }
+ }
+
+ }
+
+ @override
+ Future getCachedMessagesInModel(List outCachedMessages, String modelId) async { // Intentional no-op: not applicable/implemented for llama.cpp
+ }
+
+ @override
+ Future loadModelToMemory(String modelId) async { // Intentional no-op: the server loads its model at startup
+ return; // model is loaded via llama-server -m (default is: models/7B/ggml-model-f16.gguf)
+ }
+
+ @override
+ Future getModelInfo(ModelInfo outModelInfo, String modelId) async { // Fills outModelInfo from the /models endpoint
+
+ // As of this writing, there doesn't appear to be an endpoint to probe model info,
+ // so we'll use general settings from models query
+ try {
+
+ // Add authorization header (placeholder token when no API key is configured)
+ final Map headers = {'Authorization': 'Bearer $apiKey'};
+ if(apiKey.trim().isEmpty){
+ headers['Authorization'] = 'Bearer no-key';
+ }
+
+ // Send api request
+ await getData(
+ url: getUri('/models'),
+ requestHeaders: headers
+ );
+
+ // Decode response
+ final Map jsonData = jsonDecode(responseData);
+ final List modelsJson = jsonData['data'];
+ int? unixTimestamp;
+
+ if(modelsJson.isNotEmpty)
+ {
+ outModelInfo.parentModel = modelsJson.first['id'] ?? '';
+ unixTimestamp = modelsJson.first['created']; // Unix epoch seconds, per OpenAI-style model objects
+ outModelInfo.rootModel = '';
+ }
+
+ // Parse unix timestamp (seconds) into a human-readable UTC string
+ if(unixTimestamp != null){
+ final DateTime dateTime = DateTime.fromMillisecondsSinceEpoch(unixTimestamp * 1000, isUtc: true);
+ final String formattedDate = DateFormat('yyyy-MMM-dd HH:mm:ss').format(dateTime);
+ outModelInfo.createdOn = '$formattedDate (UTC)';
+ } else {
+ outModelInfo.createdOn = '';
+ }
+
+ } catch (e) {
+ // Log any failure (network error or malformed JSON) in debug builds; fields set so far are kept
+ if (kDebugMode) {
+ print('Unable to retrieve model info ($host): $e\n $responseData');
+ }
+ }
+
+ }
+
+ @override
+ Future deleteModel(String modelId) async { // Intentional no-op: model deletion is not supported yet (see todo)
+ // todo: allow deletion of tuned models
+ }
+
+ @override
+ Future sendPrompt({
+ required String modelId,
+ required List