Added centralized network client

JHubi1 2024-12-13 22:07:21 +01:00
parent 011e187e3a
commit b0a8f42771
No known key found for this signature in database
GPG Key ID: F538DC3FC5B07498
9 changed files with 64 additions and 56 deletions
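
The commit replaces the per-call-site construction of llama.OllamaClient (headers parsed from prefs, baseUrl built from host) with a single shared accessor in the new lib/worker/clients.dart, plus a shared http.Client and a global certificate override installed in main(). A minimal before/after sketch of the call-site pattern, using listModels() as it appears in the hunks below (prefs and host are the app's existing globals, so this is not a standalone snippet):

// before: each call site built its own client
var list = await llama.OllamaClient(
        headers: (jsonDecode(prefs!.getString("hostHeaders") ?? "{}") as Map)
            .cast<String, String>(),
        baseUrl: "$host/api")
    .listModels();

// after: import 'worker/clients.dart' and reuse the shared getter
list = await ollamaClient.listModels();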

View File

@ -19,7 +19,7 @@ pluginManagement {
plugins {
id "dev.flutter.flutter-plugin-loader" version "1.0.0"
id "com.android.application" version "7.3.0" apply false
id "org.jetbrains.kotlin.android" version "2.0.0" apply false
id "org.jetbrains.kotlin.android" version "1.9.10" apply false
}
include ":app"

View File

@ -17,6 +17,7 @@ import 'worker/sender.dart';
import 'worker/desktop.dart';
import 'worker/theme.dart';
import 'worker/update.dart';
import 'worker/clients.dart';
import 'package:shared_preferences/shared_preferences.dart';
// ignore: depend_on_referenced_packages
@ -94,6 +95,10 @@ void Function(void Function())? setMainAppState;
void main() {
pwa.PWAInstall().setup(installCallback: () {});
try {
HttpOverrides.global = OllamaHttpOverrides();
} catch (_) {}
runApp(const App());
if (desktopFeature()) {
@ -950,7 +955,7 @@ class _MainAppState extends State<MainApp> {
const SizedBox(width: 4),
allowMultipleChats
? IconButton(
enableFeedback: false,
enableFeedback: false,
onPressed: () {
selectionHaptic();
if (!chatAllowed) return;

View File

@ -1,4 +1,5 @@
import 'dart:convert';
import 'dart:async';
import 'package:flutter/material.dart';
@ -7,6 +8,7 @@ import 'worker/haptic.dart';
import 'worker/update.dart';
import 'worker/desktop.dart';
import 'worker/setter.dart';
import 'worker/clients.dart';
import 'package:flutter_gen/gen_l10n/app_localizations.dart';
import 'settings/behavior.dart';
@ -343,7 +345,8 @@ class _ScreenSettingsState extends State<ScreenSettings> {
http.Response? request;
try {
var client = http.Client();
// don't use the centralized client here: the two ways of issuing this
// request behave inconsistently, for reasons that are still unclear
final requestBase = http.Request("get", Uri.parse(tmpHost))
..headers.addAll(
(jsonDecode(prefs!.getString("hostHeaders") ?? "{}") as Map)
@ -357,7 +360,6 @@ class _ScreenSettingsState extends State<ScreenSettings> {
.round()), onTimeout: () {
return http.StreamedResponse(const Stream.empty(), 408);
}));
client.close();
} catch (e) {
setState(() {
hostInvalidHost = true;
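
For orientation, the host check above reduces to this pattern: a throwaway client sends the manually built request, and a timeout is mapped to a synthetic 408 response. A condensed sketch assuming the hunk's existing variables (tmpHost, prefs, request); the lines between the two hunks are not visible here, so the final response wiring is an assumption:

var client = http.Client();
final requestBase = http.Request("get", Uri.parse(tmpHost))
  ..headers.addAll(
      (jsonDecode(prefs!.getString("hostHeaders") ?? "{}") as Map)
          .cast<String, String>());
// send with the user-configurable timeout; on timeout, substitute a 408 so
// the validation below sees a failed status instead of hanging
final streamed = await client.send(requestBase).timeout(
    Duration(
        seconds:
            (10.0 * (prefs!.getDouble("timeoutMultiplier") ?? 1.0)).round()),
    onTimeout: () => http.StreamedResponse(const Stream.empty(), 408));
request = await http.Response.fromStream(streamed); // assumed, not shown in the hunk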

View File

@ -1,6 +1,5 @@
import 'dart:convert';
import 'package:flutter/material.dart';
import 'package:ollama_app/worker/clients.dart';
import 'package:speech_to_text/speech_to_text.dart' as stt;
import 'package:ollama_dart/ollama_dart.dart' as llama;
@ -106,11 +105,7 @@ class _ScreenVoiceState extends State<ScreenVoice> {
aiThinking = true;
try {
if (prefs!.getBool("aiPunctuation") ?? true) {
final generated = await llama.OllamaClient(
headers: (jsonDecode(prefs!.getString("hostHeaders") ?? "{}") as Map)
.cast<String, String>(),
baseUrl: "$host/api",
)
final generated = await ollamaClient
.generateCompletion(
request: llama.GenerateCompletionRequest(
model: model!,
@ -218,7 +213,7 @@ class _ScreenVoiceState extends State<ScreenVoice> {
child: Scaffold(
appBar: AppBar(
leading: IconButton(
enableFeedback: false,
enableFeedback: false,
onPressed: () {
Navigator.of(context).pop();
},
@ -241,7 +236,7 @@ class _ScreenVoiceState extends State<ScreenVoice> {
),
actions: [
IconButton(
enableFeedback: false,
enableFeedback: false,
onPressed: () {
speaking = false;
settingsOpen = false;

lib/worker/clients.dart Normal file
View File

@ -0,0 +1,22 @@
import 'dart:io';
import 'dart:convert';
import 'package:http/http.dart' as http;
import 'package:ollama_dart/ollama_dart.dart' as llama;
import '../main.dart';
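
// Installed globally in main(); returning true from badCertificateCallback
// makes dart:io accept any TLS certificate (e.g. self-signed Ollama hosts).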
class OllamaHttpOverrides extends HttpOverrides {
@override
HttpClient createHttpClient(SecurityContext? context) {
return super.createHttpClient(context)
..badCertificateCallback = (_, __, ___) => true;
}
}
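
// A single shared http.Client for plain HTTP calls, and a getter that builds
// an ollama_dart client on top of it, re-reading host and headers from prefs
// on every access so changed settings are picked up immediately.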
final httpClient = http.Client();
llama.OllamaClient get ollamaClient => llama.OllamaClient(
headers: (jsonDecode(prefs!.getString("hostHeaders") ?? "{}") as Map)
.cast<String, String>(),
baseUrl: "$host/api",
client: httpClient);
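
How the pieces fit together, as seen in the other hunks: main() installs the override once before runApp, ollama_dart calls go through the ollamaClient getter, and raw HTTP reuses httpClient. A minimal sketch under those assumptions (prefs, host, and repo are the app's existing globals):

// installed once in main(), wrapped in try/catch as in the main() hunk above
HttpOverrides.global = OllamaHttpOverrides();

// ollama_dart calls go through the getter
var list = await ollamaClient.listModels().timeout(Duration(
    seconds:
        (10.0 * (prefs!.getDouble("timeoutMultiplier") ?? 1.0)).round()));

// plain HTTP (e.g. the update check) reuses the shared httpClient
var releases = await httpClient.get(Uri.parse(
    "https://api.github.com/repos/${repo[3]}/${repo[4]}/releases"));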

View File

@ -2,6 +2,7 @@ import 'dart:convert';
import 'dart:io';
import 'package:flutter/material.dart';
import 'package:ollama_app/worker/clients.dart';
import 'haptic.dart';
import 'setter.dart';
@ -81,10 +82,7 @@ List getHistoryString([String? uuid]) {
}
Future<String> getTitleAi(List history) async {
final generated = await (llama.OllamaClient(
headers: (jsonDecode(prefs!.getString("hostHeaders") ?? "{}") as Map)
.cast<String, String>(),
baseUrl: "$host/api"))
final generated = await ollamaClient
.generateChatCompletion(
request: llama.GenerateChatCompletionRequest(
model: model!,
@ -217,16 +215,11 @@ Future<String> send(String value, BuildContext context, Function setState,
chatAllowed = false;
String text = "";
String newId = const Uuid().v4();
llama.OllamaClient client = llama.OllamaClient(
headers: (jsonDecode(prefs!.getString("hostHeaders") ?? "{}") as Map)
.cast<String, String>(),
baseUrl: "$host/api");
try {
if ((prefs!.getString("requestType") ?? "stream") == "stream") {
final stream = client
final stream = ollamaClient
.generateChatCompletionStream(
request: llama.GenerateChatCompletionRequest(
model: model!,
@ -260,7 +253,7 @@ Future<String> send(String value, BuildContext context, Function setState,
}
} else {
llama.GenerateChatCompletionResponse request;
request = await client
request = await ollamaClient
.generateChatCompletion(
request: llama.GenerateChatCompletionRequest(
model: model!,
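
Both the streaming and the non-streaming branch of send() now go through the shared getter. How the stream is consumed lies outside the visible hunk; the following is a hedged sketch of the usual ollama_dart pattern, with the message/content field names assumed from the package rather than taken from this diff:

final stream = ollamaClient.generateChatCompletionStream(
    request: llama.GenerateChatCompletionRequest(
        model: model!,
        messages: [
          llama.Message(role: llama.MessageRole.user, content: value),
        ]));
await for (final chunk in stream) {
  // each chunk carries a partial assistant message; field names are assumed
  // from ollama_dart, not from this diff
  text += chunk.message?.content ?? "";
  setState(() {});
}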

View File

@ -5,6 +5,7 @@ import 'package:flutter/material.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter_gen/gen_l10n/app_localizations.dart';
import 'package:ollama_app/worker/clients.dart';
import 'desktop.dart';
import 'haptic.dart';
import '../main.dart';
@ -31,15 +32,9 @@ void setModel(BuildContext context, Function setState) {
setState(() {});
void load() async {
try {
var list = await llama.OllamaClient(
headers:
(jsonDecode(prefs!.getString("hostHeaders") ?? "{}") as Map)
.cast<String, String>(),
baseUrl: "$host/api")
.listModels()
.timeout(Duration(
seconds: (10.0 * (prefs!.getDouble("timeoutMultiplier") ?? 1.0))
.round()));
var list = await ollamaClient.listModels().timeout(Duration(
seconds:
(10.0 * (prefs!.getDouble("timeoutMultiplier") ?? 1.0)).round()));
for (var i = 0; i < list.models!.length; i++) {
models.add(list.models![i].model!.split(":")[0]);
modelsReal.add(list.models![i].model!);
@ -123,7 +118,7 @@ void setModel(BuildContext context, Function setState) {
setLocalState(() {});
try {
// don't use the llama client here; the package can't load a model without also sending content
await http
await httpClient
.post(
Uri.parse("$host/api/generate"),
headers: {
@ -304,10 +299,6 @@ void setModel(BuildContext context, Function setState) {
}
void addModel(BuildContext context, Function setState) async {
var client = llama.OllamaClient(
headers: (jsonDecode(prefs!.getString("hostHeaders") ?? "{}") as Map)
.cast<String, String>(),
baseUrl: "$host/api");
bool canceled = false;
bool networkError = false;
bool ratelimitError = false;
@ -341,7 +332,7 @@ void addModel(BuildContext context, Function setState) async {
ratelimitError = false;
alreadyExists = false;
try {
var request = await client.listModels().timeout(Duration(
var request = await ollamaClient.listModels().timeout(Duration(
seconds: (10.0 * (prefs!.getDouble("timeoutMultiplier") ?? 1.0))
.round()));
for (var element in request.models!) {
@ -395,7 +386,7 @@ void addModel(BuildContext context, Function setState) async {
}
http.Response response;
try {
response = await http
response = await httpClient
.get(Uri.parse("$endpoint${Uri.encodeComponent(model)}"))
.timeout(Duration(
seconds: (10.0 * (prefs!.getDouble("timeoutMultiplier") ?? 1.0))
@ -490,7 +481,7 @@ void addModel(BuildContext context, Function setState) async {
});
});
try {
final stream = client
final stream = ollamaClient
.pullModelStream(request: llama.PullModelRequest(model: requestedModel))
.timeout(Duration(
seconds: (10.0 * (prefs!.getDouble("timeoutMultiplier") ?? 1.0))
@ -520,7 +511,7 @@ void addModel(BuildContext context, Function setState) async {
}
bool exists = false;
try {
var request = await client.listModels().timeout(Duration(
var request = await ollamaClient.listModels().timeout(Duration(
seconds:
(10.0 * (prefs!.getDouble("timeoutMultiplier") ?? 1.0)).round()));
for (var element in request.models!) {

View File

@ -2,6 +2,7 @@ import 'dart:async';
import 'dart:convert';
import 'package:flutter/material.dart';
import 'package:ollama_app/worker/clients.dart';
import 'package:ollama_app/worker/desktop.dart';
import 'haptic.dart';
@ -10,7 +11,6 @@ import 'package:flutter_gen/gen_l10n/app_localizations.dart';
import '../main.dart';
import 'package:http/http.dart' as http;
import 'package:install_referrer/install_referrer.dart';
import 'package:package_info_plus/package_info_plus.dart';
import 'package:flutter_markdown/flutter_markdown.dart';
@ -85,7 +85,7 @@ Future<bool> checkUpdate(Function setState) async {
String? version;
try {
var request = await http
var request = await httpClient
.get(Uri.parse(
"https://api.github.com/repos/${repo[3]}/${repo[4]}/releases"))
.timeout(Duration(

View File

@ -529,18 +529,18 @@ packages:
dependency: transitive
description:
name: leak_tracker
sha256: "7f0df31977cb2c0b88585095d168e689669a2cc9b97c309665e3386f3e9d341a"
sha256: "3f87a60e8c63aecc975dda1ceedbc8f24de75f09e4856ea27daf8958f2f0ce05"
url: "https://pub.dev"
source: hosted
version: "10.0.4"
version: "10.0.5"
leak_tracker_flutter_testing:
dependency: transitive
description:
name: leak_tracker_flutter_testing
sha256: "06e98f569d004c1315b991ded39924b21af84cf14cc94791b8aea337d25b57f8"
sha256: "932549fb305594d82d7183ecd9fa93463e9914e1b67cacc34bc40906594a1806"
url: "https://pub.dev"
source: hosted
version: "3.0.3"
version: "3.0.5"
leak_tracker_testing:
dependency: transitive
description:
@ -585,18 +585,18 @@ packages:
dependency: transitive
description:
name: material_color_utilities
sha256: "0e0a020085b65b6083975e499759762399b4475f766c21668c4ecca34ea74e5a"
sha256: f7142bb1154231d7ea5f96bc7bde4bda2a0945d2806bb11670e30b850d56bdec
url: "https://pub.dev"
source: hosted
version: "0.8.0"
version: "0.11.1"
meta:
dependency: transitive
description:
name: meta
sha256: "7687075e408b093f36e6bbf6c91878cc0d4cd10f409506f7bc996f68220b9136"
sha256: bdb68674043280c3428e9ec998512fb681678676b3c54e773629ffe74419f8c7
url: "https://pub.dev"
source: hosted
version: "1.12.0"
version: "1.15.0"
mime:
dependency: transitive
description:
@ -902,10 +902,10 @@ packages:
dependency: transitive
description:
name: test_api
sha256: "9955ae474176f7ac8ee4e989dadfb411a58c30415bcfb648fa04b2b8a03afa7f"
sha256: "5b8a98dafc4d5c4c9c72d8b31ab2b23fc13422348d2997120294d3bac86b4ddb"
url: "https://pub.dev"
source: hosted
version: "0.7.0"
version: "0.7.2"
time:
dependency: transitive
description:
@ -1046,10 +1046,10 @@ packages:
dependency: transitive
description:
name: vm_service
sha256: "3923c89304b715fb1eb6423f017651664a03bf5f4b29983627c4da791f74a4ec"
sha256: "5c5f338a667b4c644744b661f309fb8080bb94b18a7e91ef1dbd343bed00ed6d"
url: "https://pub.dev"
source: hosted
version: "14.2.1"
version: "14.2.5"
volume_controller:
dependency: "direct main"
description: