Added custom header option

This commit is contained in:
JHubi1 2024-06-01 15:46:08 +02:00
parent 8eb3dfb35e
commit 94820a300e
No known key found for this signature in database
GPG Key ID: 7BF82570CBBBD050
4 changed files with 84 additions and 15 deletions

View File

@ -130,7 +130,7 @@
"description": "Text displayed as description for new title input",
"context": "Visible in the rename dialog"
},
"dialogEditMessageTitle": "Edit Message",
"dialogEditMessageTitle": "Edit message",
"@dialogEditMessageTitle": {
"description": "Title of the edit message dialog",
"context": "Visible in the edit message dialog"
@ -181,6 +181,16 @@
}
}
},
"settingsHostHeaderTitle": "Set host header",
  "@settingsHostHeaderTitle": {
"description": "Text displayed as description for host header input",
"context": "Visible in the settings view"
},
"settingsHostHeaderInvalid": "The entered text isn't a valid header JSON object",
"@settingsHostHeaderInvalid": {
"description": "Text displayed when the host header is invalid",
"context": "Visible in the settings view"
},
  "settingsHostInvalidDetailed": "{type, select, url{The URL you entered is invalid. It isn't in a standardized URL format.} other{The host you entered is invalid. It cannot be reached. Please check the host and try again.}}",
"@settingsHostInvalidDetailed": {
"description": "Text displayed when the host is invalid",

View File

@ -674,8 +674,12 @@ class _MainAppState extends State<MainApp> {
chatAllowed = false;
String newId = const Uuid().v4();
llama.OllamaClient client =
llama.OllamaClient(baseUrl: "$host/api");
llama.OllamaClient client = llama.OllamaClient(
headers:
(jsonDecode(prefs!.getString("hostHeaders") ?? "{}")
as Map)
.cast<String, String>(),
baseUrl: "$host/api");
try {
if ((prefs!.getString("requestType") ?? "stream") ==
@ -1242,7 +1246,6 @@ class _MainAppState extends State<MainApp> {
title: AppLocalizations.of(context)!
.dialogEnterNewTitle,
value: oldTitle,
force: false,
uuid: jsonDecode(item)["uuid"]);
var tmp = (prefs!.getStringList("chats") ?? []);
for (var i = 0; i < tmp.length; i++) {

View File

@ -6,7 +6,7 @@ import 'package:flutter/services.dart';
import 'package:flutter/widgets.dart';
import 'main.dart';
import 'package:intl/intl.dart';
import 'package:ollama_app/worker_setter.dart';
import 'package:flutter_gen/gen_l10n/app_localizations.dart';
import 'package:dartx/dartx.dart';
@ -16,6 +16,7 @@ import 'package:url_launcher/url_launcher.dart';
import 'package:restart_app/restart_app.dart';
import 'package:file_picker/file_picker.dart';
import 'package:pick_or_save/pick_or_save.dart';
import 'package:intl/intl.dart';
class ScreenSettings extends StatefulWidget {
const ScreenSettings({super.key});
@ -49,7 +50,11 @@ class _ScreenSettingsState extends State<ScreenSettings> {
http.Response request;
try {
request = await http
.get(Uri.parse(tmpHost))
.get(
Uri.parse(tmpHost),
headers: (jsonDecode(prefs!.getString("hostHeaders") ?? "{}") as Map)
.cast<String, String>(),
)
.timeout(const Duration(seconds: 5), onTimeout: () {
return http.Response("Error", 408);
});
@ -165,6 +170,29 @@ class _ScreenSettingsState extends State<ScreenSettings> {
decoration: InputDecoration(
labelText: AppLocalizations.of(context)!.settingsHost,
hintText: "http://localhost:11434",
prefixIcon: IconButton(
onPressed: () async {
HapticFeedback.selectionClick();
String tmp = await prompt(context,
placeholder:
"{\"Authorization\": \"Bearer ...\"}",
title: AppLocalizations.of(context)!
.settingsHostHeaderTitle,
value: (prefs!.getString("hostHeaders") ??
""), validator: (content) async {
try {
var tmp = jsonDecode(content);
tmp as Map<String, dynamic>;
return true;
} catch (_) {
return false;
}
},
validatorError: AppLocalizations.of(context)!
.settingsHostHeaderInvalid);
prefs!.setString("hostHeaders", tmp);
},
icon: const Icon(Icons.add_rounded)),
suffixIcon: useHost
? const SizedBox.shrink()
: (hostLoading

View File

@ -21,7 +21,12 @@ void setModel(BuildContext context, Function setState) {
bool loaded = false;
Function? setModalState;
void load() async {
var list = await llama.OllamaClient(baseUrl: "$host/api").listModels();
var list = await llama.OllamaClient(
headers:
(jsonDecode(prefs!.getString("hostHeaders") ?? "{}") as Map)
.cast<String, String>(),
baseUrl: "$host/api")
.listModels();
for (var i = 0; i < list.models!.length; i++) {
models.add(list.models![i].model!.split(":")[0]);
modelsReal.add(list.models![i].model!);
@ -284,15 +289,18 @@ Future<String> prompt(BuildContext context,
{String description = "",
String value = "",
String title = "",
bool force = false,
String? valueIfCanceled,
TextInputType keyboard = TextInputType.text,
Icon? prefixIcon,
int maxLines = 1,
String? uuid}) async {
String? uuid,
Future<bool> Function(String content)? validator,
String? validatorError,
String? placeholder}) async {
var returnText = (valueIfCanceled != null) ? valueIfCanceled : value;
final TextEditingController controller = TextEditingController(text: value);
bool loading = false;
String? error;
await showModalBottomSheet(
context: context,
isScrollControlled: true,
@ -329,9 +337,25 @@ Future<String> prompt(BuildContext context,
maxLines: maxLines,
decoration: InputDecoration(
border: const OutlineInputBorder(),
hintText: placeholder,
errorText: error,
suffixIcon: IconButton(
onPressed: () {
onPressed: () async {
if (validator != null) {
setLocalState(() {
error = null;
});
bool valid =
await validator(controller.text);
if (!valid) {
setLocalState(() {
error = validatorError;
});
return;
}
}
returnText = controller.text;
// ignore: use_build_context_synchronously
Navigator.of(context).pop();
},
icon: const Icon(Icons.save_rounded)),
@ -381,10 +405,14 @@ Future<String> prompt(BuildContext context,
return;
}
final generated =
await llama.OllamaClient(
baseUrl:
"$host/api")
final generated = await llama.OllamaClient(
headers: (jsonDecode(prefs!
.getString(
"hostHeaders") ??
"{}") as Map)
.cast<String,
String>(),
baseUrl: "$host/api")
.generateCompletion(
request: llama
.GenerateCompletionRequest(