@@ -98,6 +98,7 @@ class CRUD {
await arabicTextExtractByVisionAndAI(imagePath: imagePath);
var json = jsonDecode(extractedString);
var textValues = extractTextFromLines(json);
print(textValues);
// await Get.put(AI()).geminiAiExtraction(prompt, textValues);
await Get.put(AI()).anthropicAI(textValues, prompt, imagePath);
}
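For context on the call above: extractTextFromLines is not shown in this diff, but it presumably flattens the OCR JSON, which in the Azure OCR response nests regions → lines → words. A minimal sketch of such a helper, assuming that response shape (the helper name, return type, and field handling here are guesses, not taken from this commit):

// Sketch only: assumes the Azure OCR response shape
// { "regions": [ { "lines": [ { "words": [ { "text": "..." } ] } ] } ] }.
String extractTextFromLinesSketch(Map<String, dynamic> json) {
  final buffer = StringBuffer();
  for (final region in (json['regions'] as List? ?? const [])) {
    for (final line in (region['lines'] as List? ?? const [])) {
      buffer.writeln(
          (line['words'] as List? ?? const []).map((w) => w['text']).join(' '));
    }
  }
  return buffer.toString();
}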
@@ -129,8 +130,9 @@ class CRUD {
};

String imagePathFull =
'${AppLink.server}card_image/$imagePath-${box.read(BoxName.driverID)}.jpg';

'${AppLink.server}/card_image/$imagePath-${box.read(BoxName.driverID)}.jpg';
// print(box.read(BoxName.driverID));
// print(imagePathFull);
var request = http.Request('POST', Uri.parse(
// 'https://ocrhamza.cognitiveservices.azure.com/vision/v2.1/ocr?language=ar'));
// 'https://eastus.api.cognitive.microsoft.com/vision/v3.2/ocr'
@@ -141,6 +143,7 @@ class CRUD {
http.StreamedResponse response = await request.send();

if (response.statusCode == 200) {
// print(await response.stream.bytesToString());
return await response.stream.bytesToString();
} else {}
}
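Taken together, the two hunks above build and send a raw OCR request and read the streamed body. A self-contained sketch of that flow with package:http, assuming the Azure OCR endpoint accepts a JSON body carrying an image URL (the region, key, and error handling below are placeholders, not this project's values):

import 'dart:convert';
import 'package:http/http.dart' as http;

// Sketch: POST an image URL to an Azure OCR endpoint and return the raw body.
// YOUR_REGION and YOUR_KEY are placeholders.
Future<String?> ocrImageUrl(String imageUrl) async {
  final request = http.Request(
    'POST',
    Uri.parse(
        'https://YOUR_REGION.api.cognitive.microsoft.com/vision/v3.2/ocr?language=ar'),
  );
  request.headers['Ocp-Apim-Subscription-Key'] = 'YOUR_KEY';
  request.headers['Content-Type'] = 'application/json';
  request.body = jsonEncode({'url': imageUrl});

  final http.StreamedResponse response = await request.send();
  if (response.statusCode == 200) {
    return await response.stream.bytesToString();
  }
  return null; // surface the failure instead of an empty else branch
}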
@@ -218,7 +221,9 @@ class CRUD {
'Basic ${base64Encode(utf8.encode(AK.basicAuthCredentials))}',
},
);

print(response.request);
print(response.body);
print(response.statusCode);
var jsonData = jsonDecode(response.body);
if (response.statusCode == 200) {
if (jsonData['status'] == 'success') {
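The Authorization value above is standard HTTP Basic auth: base64 of 'user:password'. A tiny self-contained illustration (the credentials are made up):

import 'dart:convert';

// Sketch: build a Basic auth header value from username:password.
String basicAuthHeader(String username, String password) =>
    'Basic ${base64Encode(utf8.encode('$username:$password'))}';

// basicAuthHeader('apiUser', 'secret') => 'Basic YXBpVXNlcjpzZWNyZXQ='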
@@ -393,10 +393,13 @@ class AI extends GetxController {
update();
await ImageController().choosImage(linkPHP, imagePath);
await Future.delayed(const Duration(seconds: 2));
// print(imagePath);
var extractedString =
await CRUD().arabicTextExtractByVisionAndAI(imagePath: imagePath);
print(extractedString);
var json = jsonDecode(extractedString);
var textValues = CRUD().extractTextFromLines(json);
print(textValues);
// await Get.put(AI()).geminiAiExtraction(prompt, textValues, imagePath);
await Get.put(AI()).anthropicAI(textValues, prompt, imagePath);
isLoading = false;
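One detail in the controller flow above: Future.delayed only pauses the enclosing async method when it is awaited, so the gap between the image upload and the OCR call depends on that await. A minimal illustration of the difference:

// Sketch: the un-awaited call returns a Future immediately and the next
// line runs at once; the awaited call actually suspends for two seconds.
Future<void> delayExample() async {
  Future.delayed(const Duration(seconds: 2));       // fire-and-forget
  await Future.delayed(const Duration(seconds: 2)); // real pause
}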
@@ -7,6 +7,14 @@ void showInBrowser(String url) async {
} else {}
}

Future<void> makePhoneCall(String phoneNumber) async {
final Uri launchUri = Uri(
scheme: 'tel',
path: phoneNumber,
);
await launchUrl(launchUri);
}

void launchCommunication(
String method, String contactInfo, String message) async {
String url;
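launchCommunication's body is cut off by the hunk, but given its signature it presumably maps the method string onto a URI scheme before launching it, much like makePhoneCall does for 'tel'. A hedged sketch of one such mapping with url_launcher (the method names and schemes are assumptions, not taken from this commit):

import 'package:url_launcher/url_launcher.dart';

// Sketch: route a contact method to the matching URI scheme.
Future<void> launchCommunicationSketch(
    String method, String contactInfo, String message) async {
  late final Uri uri;
  switch (method) {
    case 'phone':
      uri = Uri(scheme: 'tel', path: contactInfo);
      break;
    case 'sms':
      uri = Uri(
          scheme: 'sms', path: contactInfo, queryParameters: {'body': message});
      break;
    case 'email':
      uri = Uri(
          scheme: 'mailto',
          path: contactInfo,
          queryParameters: {'subject': message});
      break;
    default:
      uri = Uri.parse(contactInfo);
  }
  if (await canLaunchUrl(uri)) {
    await launchUrl(uri);
  }
}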
@@ -54,6 +54,7 @@ class ImageController extends GetxController {
update();
// Save the cropped image
File savedCroppedImage = File(croppedFile!.path);
print('link =$link');
try {
await uploadImage(
savedCroppedImage,
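uploadImage itself lies outside this hunk; a typical way to send the cropped file as multipart/form-data with package:http would be something like the following (the 'image' field name and the endpoint argument are placeholders):

import 'dart:io';
import 'package:http/http.dart' as http;

// Sketch: upload an image file to a server endpoint as multipart/form-data.
Future<int> uploadImageSketch(File imageFile, String link) async {
  final request = http.MultipartRequest('POST', Uri.parse(link));
  request.files.add(await http.MultipartFile.fromPath('image', imageFile.path));
  final response = await request.send();
  return response.statusCode;
}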