Added centralized network client

This commit is contained in:
JHubi1 2024-12-13 22:07:21 +01:00
parent 011e187e3a
commit b0a8f42771
No known key found for this signature in database
GPG Key ID: F538DC3FC5B07498
9 changed files with 64 additions and 56 deletions

View File

@ -19,7 +19,7 @@ pluginManagement {
plugins { plugins {
id "dev.flutter.flutter-plugin-loader" version "1.0.0" id "dev.flutter.flutter-plugin-loader" version "1.0.0"
id "com.android.application" version "7.3.0" apply false id "com.android.application" version "7.3.0" apply false
id "org.jetbrains.kotlin.android" version "2.0.0" apply false id "org.jetbrains.kotlin.android" version "1.9.10" apply false
} }
include ":app" include ":app"

View File

@ -17,6 +17,7 @@ import 'worker/sender.dart';
import 'worker/desktop.dart'; import 'worker/desktop.dart';
import 'worker/theme.dart'; import 'worker/theme.dart';
import 'worker/update.dart'; import 'worker/update.dart';
import 'worker/clients.dart';
import 'package:shared_preferences/shared_preferences.dart'; import 'package:shared_preferences/shared_preferences.dart';
// ignore: depend_on_referenced_packages // ignore: depend_on_referenced_packages
@ -94,6 +95,10 @@ void Function(void Function())? setMainAppState;
void main() { void main() {
pwa.PWAInstall().setup(installCallback: () {}); pwa.PWAInstall().setup(installCallback: () {});
try {
HttpOverrides.global = OllamaHttpOverrides();
} catch (_) {}
runApp(const App()); runApp(const App());
if (desktopFeature()) { if (desktopFeature()) {

View File

@ -1,4 +1,5 @@
import 'dart:convert'; import 'dart:convert';
import 'dart:async';
import 'package:flutter/material.dart'; import 'package:flutter/material.dart';
@ -7,6 +8,7 @@ import 'worker/haptic.dart';
import 'worker/update.dart'; import 'worker/update.dart';
import 'worker/desktop.dart'; import 'worker/desktop.dart';
import 'worker/setter.dart'; import 'worker/setter.dart';
import 'worker/clients.dart';
import 'package:flutter_gen/gen_l10n/app_localizations.dart'; import 'package:flutter_gen/gen_l10n/app_localizations.dart';
import 'settings/behavior.dart'; import 'settings/behavior.dart';
@ -343,7 +345,8 @@ class _ScreenSettingsState extends State<ScreenSettings> {
http.Response? request; http.Response? request;
try { try {
var client = http.Client(); // don't use centralized client because of unexplainable inconsistency
// between the ways of calling a request
final requestBase = http.Request("get", Uri.parse(tmpHost)) final requestBase = http.Request("get", Uri.parse(tmpHost))
..headers.addAll( ..headers.addAll(
(jsonDecode(prefs!.getString("hostHeaders") ?? "{}") as Map) (jsonDecode(prefs!.getString("hostHeaders") ?? "{}") as Map)
@ -357,7 +360,6 @@ class _ScreenSettingsState extends State<ScreenSettings> {
.round()), onTimeout: () { .round()), onTimeout: () {
return http.StreamedResponse(const Stream.empty(), 408); return http.StreamedResponse(const Stream.empty(), 408);
})); }));
client.close();
} catch (e) { } catch (e) {
setState(() { setState(() {
hostInvalidHost = true; hostInvalidHost = true;

View File

@ -1,6 +1,5 @@
import 'dart:convert';
import 'package:flutter/material.dart'; import 'package:flutter/material.dart';
import 'package:ollama_app/worker/clients.dart';
import 'package:speech_to_text/speech_to_text.dart' as stt; import 'package:speech_to_text/speech_to_text.dart' as stt;
import 'package:ollama_dart/ollama_dart.dart' as llama; import 'package:ollama_dart/ollama_dart.dart' as llama;
@ -106,11 +105,7 @@ class _ScreenVoiceState extends State<ScreenVoice> {
aiThinking = true; aiThinking = true;
try { try {
if (prefs!.getBool("aiPunctuation") ?? true) { if (prefs!.getBool("aiPunctuation") ?? true) {
final generated = await llama.OllamaClient( final generated = await ollamaClient
headers: (jsonDecode(prefs!.getString("hostHeaders") ?? "{}") as Map)
.cast<String, String>(),
baseUrl: "$host/api",
)
.generateCompletion( .generateCompletion(
request: llama.GenerateCompletionRequest( request: llama.GenerateCompletionRequest(
model: model!, model: model!,

22
lib/worker/clients.dart Normal file
View File

@ -0,0 +1,22 @@
import 'dart:io';
import 'dart:convert';
import 'package:http/http.dart' as http;
import 'package:ollama_dart/ollama_dart.dart' as llama;
import '../main.dart';
/// Global [HttpOverrides] that disables TLS certificate validation for
/// every [HttpClient] the app creates.
///
/// NOTE(review): `badCertificateCallback` unconditionally returns `true`,
/// which accepts *any* certificate and exposes all HTTPS traffic to
/// man-in-the-middle attacks. Presumably this exists to support
/// self-signed certificates on local Ollama hosts — confirm that this
/// trade-off is intended before shipping, or gate it behind a setting.
class OllamaHttpOverrides extends HttpOverrides {
  @override
  HttpClient createHttpClient(SecurityContext? context) {
    final client = super.createHttpClient(context);
    // Accept every certificate regardless of host, port, or validity.
    client.badCertificateCallback =
        (X509Certificate cert, String host, int port) => true;
    return client;
  }
}
/// Single shared HTTP client, reused for all plain-HTTP requests so that
/// connections can be pooled instead of opening a new client per call.
final httpClient = http.Client();

/// Centralized Ollama API client.
///
/// Implemented as a getter (rather than a cached instance) so that each
/// access picks up the current `host` and the latest `hostHeaders`
/// preference; the underlying [httpClient] is still shared.
llama.OllamaClient get ollamaClient {
  // Headers are persisted as a JSON object string; default to "{}" when unset.
  final headers = (jsonDecode(prefs!.getString("hostHeaders") ?? "{}") as Map)
      .cast<String, String>();
  return llama.OllamaClient(
    headers: headers,
    baseUrl: "$host/api",
    client: httpClient,
  );
}

View File

@ -2,6 +2,7 @@ import 'dart:convert';
import 'dart:io'; import 'dart:io';
import 'package:flutter/material.dart'; import 'package:flutter/material.dart';
import 'package:ollama_app/worker/clients.dart';
import 'haptic.dart'; import 'haptic.dart';
import 'setter.dart'; import 'setter.dart';
@ -81,10 +82,7 @@ List getHistoryString([String? uuid]) {
} }
Future<String> getTitleAi(List history) async { Future<String> getTitleAi(List history) async {
final generated = await (llama.OllamaClient( final generated = await ollamaClient
headers: (jsonDecode(prefs!.getString("hostHeaders") ?? "{}") as Map)
.cast<String, String>(),
baseUrl: "$host/api"))
.generateChatCompletion( .generateChatCompletion(
request: llama.GenerateChatCompletionRequest( request: llama.GenerateChatCompletionRequest(
model: model!, model: model!,
@ -217,16 +215,11 @@ Future<String> send(String value, BuildContext context, Function setState,
chatAllowed = false; chatAllowed = false;
String text = ""; String text = "";
String newId = const Uuid().v4(); String newId = const Uuid().v4();
llama.OllamaClient client = llama.OllamaClient(
headers: (jsonDecode(prefs!.getString("hostHeaders") ?? "{}") as Map)
.cast<String, String>(),
baseUrl: "$host/api");
try { try {
if ((prefs!.getString("requestType") ?? "stream") == "stream") { if ((prefs!.getString("requestType") ?? "stream") == "stream") {
final stream = client final stream = ollamaClient
.generateChatCompletionStream( .generateChatCompletionStream(
request: llama.GenerateChatCompletionRequest( request: llama.GenerateChatCompletionRequest(
model: model!, model: model!,
@ -260,7 +253,7 @@ Future<String> send(String value, BuildContext context, Function setState,
} }
} else { } else {
llama.GenerateChatCompletionResponse request; llama.GenerateChatCompletionResponse request;
request = await client request = await ollamaClient
.generateChatCompletion( .generateChatCompletion(
request: llama.GenerateChatCompletionRequest( request: llama.GenerateChatCompletionRequest(
model: model!, model: model!,

View File

@ -5,6 +5,7 @@ import 'package:flutter/material.dart';
import 'package:flutter/foundation.dart'; import 'package:flutter/foundation.dart';
import 'package:flutter_gen/gen_l10n/app_localizations.dart'; import 'package:flutter_gen/gen_l10n/app_localizations.dart';
import 'package:ollama_app/worker/clients.dart';
import 'desktop.dart'; import 'desktop.dart';
import 'haptic.dart'; import 'haptic.dart';
import '../main.dart'; import '../main.dart';
@ -31,15 +32,9 @@ void setModel(BuildContext context, Function setState) {
setState(() {}); setState(() {});
void load() async { void load() async {
try { try {
var list = await llama.OllamaClient( var list = await ollamaClient.listModels().timeout(Duration(
headers: seconds:
(jsonDecode(prefs!.getString("hostHeaders") ?? "{}") as Map) (10.0 * (prefs!.getDouble("timeoutMultiplier") ?? 1.0)).round()));
.cast<String, String>(),
baseUrl: "$host/api")
.listModels()
.timeout(Duration(
seconds: (10.0 * (prefs!.getDouble("timeoutMultiplier") ?? 1.0))
.round()));
for (var i = 0; i < list.models!.length; i++) { for (var i = 0; i < list.models!.length; i++) {
models.add(list.models![i].model!.split(":")[0]); models.add(list.models![i].model!.split(":")[0]);
modelsReal.add(list.models![i].model!); modelsReal.add(list.models![i].model!);
@ -123,7 +118,7 @@ void setModel(BuildContext context, Function setState) {
setLocalState(() {}); setLocalState(() {});
try { try {
// don't use llama client, package doesn't support just loading without content // don't use llama client, package doesn't support just loading without content
await http await httpClient
.post( .post(
Uri.parse("$host/api/generate"), Uri.parse("$host/api/generate"),
headers: { headers: {
@ -304,10 +299,6 @@ void setModel(BuildContext context, Function setState) {
} }
void addModel(BuildContext context, Function setState) async { void addModel(BuildContext context, Function setState) async {
var client = llama.OllamaClient(
headers: (jsonDecode(prefs!.getString("hostHeaders") ?? "{}") as Map)
.cast<String, String>(),
baseUrl: "$host/api");
bool canceled = false; bool canceled = false;
bool networkError = false; bool networkError = false;
bool ratelimitError = false; bool ratelimitError = false;
@ -341,7 +332,7 @@ void addModel(BuildContext context, Function setState) async {
ratelimitError = false; ratelimitError = false;
alreadyExists = false; alreadyExists = false;
try { try {
var request = await client.listModels().timeout(Duration( var request = await ollamaClient.listModels().timeout(Duration(
seconds: (10.0 * (prefs!.getDouble("timeoutMultiplier") ?? 1.0)) seconds: (10.0 * (prefs!.getDouble("timeoutMultiplier") ?? 1.0))
.round())); .round()));
for (var element in request.models!) { for (var element in request.models!) {
@ -395,7 +386,7 @@ void addModel(BuildContext context, Function setState) async {
} }
http.Response response; http.Response response;
try { try {
response = await http response = await httpClient
.get(Uri.parse("$endpoint${Uri.encodeComponent(model)}")) .get(Uri.parse("$endpoint${Uri.encodeComponent(model)}"))
.timeout(Duration( .timeout(Duration(
seconds: (10.0 * (prefs!.getDouble("timeoutMultiplier") ?? 1.0)) seconds: (10.0 * (prefs!.getDouble("timeoutMultiplier") ?? 1.0))
@ -490,7 +481,7 @@ void addModel(BuildContext context, Function setState) async {
}); });
}); });
try { try {
final stream = client final stream = ollamaClient
.pullModelStream(request: llama.PullModelRequest(model: requestedModel)) .pullModelStream(request: llama.PullModelRequest(model: requestedModel))
.timeout(Duration( .timeout(Duration(
seconds: (10.0 * (prefs!.getDouble("timeoutMultiplier") ?? 1.0)) seconds: (10.0 * (prefs!.getDouble("timeoutMultiplier") ?? 1.0))
@ -520,7 +511,7 @@ void addModel(BuildContext context, Function setState) async {
} }
bool exists = false; bool exists = false;
try { try {
var request = await client.listModels().timeout(Duration( var request = await ollamaClient.listModels().timeout(Duration(
seconds: seconds:
(10.0 * (prefs!.getDouble("timeoutMultiplier") ?? 1.0)).round())); (10.0 * (prefs!.getDouble("timeoutMultiplier") ?? 1.0)).round()));
for (var element in request.models!) { for (var element in request.models!) {

View File

@ -2,6 +2,7 @@ import 'dart:async';
import 'dart:convert'; import 'dart:convert';
import 'package:flutter/material.dart'; import 'package:flutter/material.dart';
import 'package:ollama_app/worker/clients.dart';
import 'package:ollama_app/worker/desktop.dart'; import 'package:ollama_app/worker/desktop.dart';
import 'haptic.dart'; import 'haptic.dart';
@ -10,7 +11,6 @@ import 'package:flutter_gen/gen_l10n/app_localizations.dart';
import '../main.dart'; import '../main.dart';
import 'package:http/http.dart' as http;
import 'package:install_referrer/install_referrer.dart'; import 'package:install_referrer/install_referrer.dart';
import 'package:package_info_plus/package_info_plus.dart'; import 'package:package_info_plus/package_info_plus.dart';
import 'package:flutter_markdown/flutter_markdown.dart'; import 'package:flutter_markdown/flutter_markdown.dart';
@ -85,7 +85,7 @@ Future<bool> checkUpdate(Function setState) async {
String? version; String? version;
try { try {
var request = await http var request = await httpClient
.get(Uri.parse( .get(Uri.parse(
"https://api.github.com/repos/${repo[3]}/${repo[4]}/releases")) "https://api.github.com/repos/${repo[3]}/${repo[4]}/releases"))
.timeout(Duration( .timeout(Duration(

View File

@ -529,18 +529,18 @@ packages:
dependency: transitive dependency: transitive
description: description:
name: leak_tracker name: leak_tracker
sha256: "7f0df31977cb2c0b88585095d168e689669a2cc9b97c309665e3386f3e9d341a" sha256: "3f87a60e8c63aecc975dda1ceedbc8f24de75f09e4856ea27daf8958f2f0ce05"
url: "https://pub.dev" url: "https://pub.dev"
source: hosted source: hosted
version: "10.0.4" version: "10.0.5"
leak_tracker_flutter_testing: leak_tracker_flutter_testing:
dependency: transitive dependency: transitive
description: description:
name: leak_tracker_flutter_testing name: leak_tracker_flutter_testing
sha256: "06e98f569d004c1315b991ded39924b21af84cf14cc94791b8aea337d25b57f8" sha256: "932549fb305594d82d7183ecd9fa93463e9914e1b67cacc34bc40906594a1806"
url: "https://pub.dev" url: "https://pub.dev"
source: hosted source: hosted
version: "3.0.3" version: "3.0.5"
leak_tracker_testing: leak_tracker_testing:
dependency: transitive dependency: transitive
description: description:
@ -585,18 +585,18 @@ packages:
dependency: transitive dependency: transitive
description: description:
name: material_color_utilities name: material_color_utilities
sha256: "0e0a020085b65b6083975e499759762399b4475f766c21668c4ecca34ea74e5a" sha256: f7142bb1154231d7ea5f96bc7bde4bda2a0945d2806bb11670e30b850d56bdec
url: "https://pub.dev" url: "https://pub.dev"
source: hosted source: hosted
version: "0.8.0" version: "0.11.1"
meta: meta:
dependency: transitive dependency: transitive
description: description:
name: meta name: meta
sha256: "7687075e408b093f36e6bbf6c91878cc0d4cd10f409506f7bc996f68220b9136" sha256: bdb68674043280c3428e9ec998512fb681678676b3c54e773629ffe74419f8c7
url: "https://pub.dev" url: "https://pub.dev"
source: hosted source: hosted
version: "1.12.0" version: "1.15.0"
mime: mime:
dependency: transitive dependency: transitive
description: description:
@ -902,10 +902,10 @@ packages:
dependency: transitive dependency: transitive
description: description:
name: test_api name: test_api
sha256: "9955ae474176f7ac8ee4e989dadfb411a58c30415bcfb648fa04b2b8a03afa7f" sha256: "5b8a98dafc4d5c4c9c72d8b31ab2b23fc13422348d2997120294d3bac86b4ddb"
url: "https://pub.dev" url: "https://pub.dev"
source: hosted source: hosted
version: "0.7.0" version: "0.7.2"
time: time:
dependency: transitive dependency: transitive
description: description:
@ -1046,10 +1046,10 @@ packages:
dependency: transitive dependency: transitive
description: description:
name: vm_service name: vm_service
sha256: "3923c89304b715fb1eb6423f017651664a03bf5f4b29983627c4da791f74a4ec" sha256: "5c5f338a667b4c644744b661f309fb8080bb94b18a7e91ef1dbd343bed00ed6d"
url: "https://pub.dev" url: "https://pub.dev"
source: hosted source: hosted
version: "14.2.1" version: "14.2.5"
volume_controller: volume_controller:
dependency: "direct main" dependency: "direct main"
description: description: