forked from microsoft/ailab
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path: ObjectDetector.cs
101 lines (85 loc) · 3.91 KB
/
ObjectDetector.cs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
using Microsoft.Azure.CognitiveServices.Vision.CustomVision.Prediction;
using Microsoft.ProjectOxford.Vision;
using Microsoft.ProjectOxford.Vision.Contract;
using System;
using System.Collections.Generic;
using System.Configuration;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
namespace Sketch2Code.AI
{
/// <summary>
/// Custom Vision client specialized for Sketch2Code: detects UI objects in a
/// sketch image and extracts handwritten text via the Computer Vision
/// handwriting-recognition (long-running) operation.
/// </summary>
public class ObjectDetector : CustomVisionClient
{
    /// <summary>
    /// Builds the detector from app settings
    /// (ObjectDetectionTrainingKey / PredictionKey / ProjectName).
    /// </summary>
    public ObjectDetector()
        : base(ConfigurationManager.AppSettings["ObjectDetectionTrainingKey"],
               ConfigurationManager.AppSettings["ObjectDetectionPredictionKey"],
               ConfigurationManager.AppSettings["ObjectDetectionProjectName"])
    {
    }

    /// <summary>Builds the detector from explicit keys and project name.</summary>
    public ObjectDetector(string trainingKey, string predictionKey, string projectName)
        : base(trainingKey, predictionKey, projectName)
    {
    }

    /// <summary>
    /// Runs Custom Vision object detection over the raw image bytes and
    /// returns the service's prediction result.
    /// </summary>
    /// <param name="image">Encoded image bytes to submit for prediction.</param>
    public async Task<Microsoft.Azure.CognitiveServices.Vision.CustomVision.Prediction.Models.ImagePrediction> GetDetectedObjects(byte[] image)
    {
        using (var endpoint = new PredictionEndpoint() { ApiKey = this._predictionApiKey })
        using (var ms = new MemoryStream(image))
        {
            return await endpoint.PredictImageAsync(this._project.Id, ms);
        }
    }

    /// <summary>
    /// Extracts all recognized handwritten words from the image as a flat list.
    /// On failure the list contains a single diagnostic message instead of words.
    /// </summary>
    /// <param name="image">Encoded image bytes to submit for recognition.</param>
    public async Task<List<String>> GetText(byte[] image)
    {
        var list = new List<String>();
        try
        {
            var result = await RecognizeHandwritingAsync(image);
            if (result == null)
            {
                // Service reported Failed for the operation.
                return new List<string>(new string[] { "Text prediction failed" });
            }
            // BUG FIX: `l.Words?.Select(...)` yields a null inner sequence when a
            // line has no Words, which makes SelectMany throw NullReferenceException.
            // Coalesce to an empty sequence so such lines are simply skipped.
            list = result.RecognitionResult.Lines
                .SelectMany(l => l.Words?.Select(w => w.Text) ?? Enumerable.Empty<string>())
                .ToList();
        }
        catch (ClientException ex)
        {
            list.Add($"Text prediction failed: {ex.Error.Message}. Id: {ex.Error.Code}.");
        }
        return list;
    }

    /// <summary>
    /// Returns the recognized handwriting lines (including layout information),
    /// or null when recognition fails or the service reports an error.
    /// </summary>
    /// <param name="image">Encoded image bytes to submit for recognition.</param>
    public async Task<HandwritingTextLine[]> GetTextRecognition(byte[] image)
    {
        try
        {
            var result = await RecognizeHandwritingAsync(image);
            return result?.RecognitionResult.Lines;
        }
        catch (ClientException)
        {
            // Callers treat null as "no text recognized"; no message needed here.
            return null;
        }
    }

    // Shared polling loop for the long-running handwriting-recognition
    // operation (previously duplicated in GetText and GetTextRecognition).
    // Returns the completed result, or null when the service reports Failed.
    // May throw ClientException; callers handle it per their own contract.
    private async Task<HandwritingRecognitionOperationResult> RecognizeHandwritingAsync(byte[] image)
    {
        // Parse the poll interval once instead of on every iteration.
        var delayMs = Convert.ToInt32(ConfigurationManager.AppSettings["ComputerVisionDelay"]);
        using (var ms = new MemoryStream(image))
        {
            var operation = await _visionClient.CreateHandwritingRecognitionOperationAsync(ms);
            var result = await _visionClient.GetHandwritingRecognitionOperationResultAsync(operation);
            while (result.Status != HandwritingRecognitionOperationStatus.Succeeded)
            {
                if (result.Status == HandwritingRecognitionOperationStatus.Failed)
                {
                    return null;
                }
                await Task.Delay(delayMs);
                result = await _visionClient.GetHandwritingRecognitionOperationResultAsync(operation);
            }
            return result;
        }
    }
}
}