@@ -1,11 +1,15 @@
 import 'dart:convert';
+import 'package:SEFER/constant/box_name.dart';
+import 'package:SEFER/constant/links.dart';
+import 'package:SEFER/main.dart';
 import 'package:get/get.dart';
 import 'package:http/http.dart' as http;
 import 'package:SEFER/env/env.dart';

 import '../../constant/api_key.dart';
 import 'gemeni.dart';
 import 'llama_ai.dart';
+import 'upload_image.dart';

 class CRUD {
   Future<dynamic> get({
@@ -62,21 +66,25 @@ class CRUD {
   Future<dynamic> getLlama({
     required String link,
     required String payload,
+    required String prompt,
   }) async {
     var url = Uri.parse(
       link,
     );
     var headers = {
       'Content-Type': 'application/json',
-      'Authorization': 'Bearer ${Env.llamaKey}'
+      'Authorization':
+          'Bearer LL-X5lJ0Px9CzKK0HTuVZ3u2u4v3tGWkImLTG7okGRk4t25zrsLqJ0qNoUzZ2x4ciPy'
+      // 'Authorization': 'Bearer ${Env.llamaKey}'
     };
     var data = json.encode({
-      "model": "llama-13b-chat",
+      "model": "Llama-3-70b-Inst-FW",
+      // "model": "llama-13b-chat",
       "messages": [
         {
           "role": "user",
           "content":
-              "Extract the desired information from the following passage as json decoded like vin,make,made,year,expiration_date,color,owner,registration_date just in this:\n\n$payload"
+              "Extract the desired information from the following passage as json decoded like $prompt just in this:\n\n$payload"
         }
       ],
       "temperature": 0.9
@@ -93,6 +101,44 @@ class CRUD {
     return response.statusCode;
   }

+  Future allMethodForAI(String prompt, linkPHP, imagePath) async {
+    await ImageController().choosImage(linkPHP, imagePath);
+    Future.delayed(const Duration(seconds: 2));
+    String extracted =
+        await arabicTextExtractByVisionAndAI(imagePath: imagePath);
+    await AI().geminiAiExtraction(prompt, extracted);
+  }
+
+  Future<dynamic> arabicTextExtractByVisionAndAI({
+    required String imagePath,
+  }) async {
+    var headers = {
+      'Content-Type': 'application/json',
+      'Ocp-Apim-Subscription-Key': '21010e54b50f41a4904708c526e102df'
+    };
+    var url = Uri.parse(
+      'https://ocrhamza.cognitiveservices.azure.com/vision/v2.1/ocr?language=ar',
+    );
+    String imagePathFull =
+        '${AppLink.server}card_image/$imagePath-${box.read(BoxName.driverID) ?? box.read(BoxName.passengerID)}.jpg';
+
+    print('imagePath=$imagePathFull');
+    var requestBody = {"url": imagePathFull};
+    var response = await http.post(
+      url,
+      body: jsonEncode(requestBody), // Encode the JSON object to a string
+      headers: headers,
+    );
+
+    if (response.statusCode == 200) {
+      var responseBody = jsonDecode(response.body);
+      // print(decode);
+      print('imagePath=$imagePathFull');
+      return responseBody.toString();
+    }
+    return response.statusCode;
+  }
+
   Future<dynamic> getChatGPT({
     required String link,
     required String payload,
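A minimal usage sketch (not part of this commit) of the updated getLlama signature, assuming the caller already holds OCR-extracted text; the endpoint URL, the helper function name, and the field list passed as prompt are placeholder assumptions, not values from the repository:

  // Hypothetical caller; endpoint and field list are illustrative assumptions.
  Future<void> extractCardInfo(String ocrText) async {
    final result = await CRUD().getLlama(
      link: 'https://example.com/v1/chat/completions', // placeholder endpoint
      payload: ocrText,
      prompt: 'vin,make,year,expiration_date,color,owner,registration_date',
    );
    print(result); // raw response, handled as in the existing getLlama body
  }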