//
// CoreMLImage.swift
//
// Created by Daryl Rowland on 15/12/2017.
// Copyright © Jigsaw XYZ
//
import Foundation
import UIKit
import CoreML
import AVFoundation
import Vision
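
// CoreMLImage renders a live camera preview and runs every captured frame through
// a Vision-wrapped Core ML model, forwarding the resulting classifications to
// JavaScript through the onClassification event block.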
@available(iOS 11.0, *)
@objc(CoreMLImage)
public class CoreMLImage: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {

  var bridge: RCTEventDispatcher!
  var captureSession: AVCaptureSession?
  var videoPreviewLayer: AVCaptureVideoPreviewLayer?
  var model: VNCoreMLModel?
  var lastClassification: String = ""
  var onClassification: RCTBubblingEventBlock?

  required public init(coder aDecoder: NSCoder) {
    super.init(coder: aDecoder)!
  }

  override init(frame: CGRect) {
    super.init(frame: frame)
    self.frame = frame
  }

  // Not used; this view only consumes sample buffers from the video data output.
  func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
  }

  // AVCaptureVideoDataOutputSampleBufferDelegate: called for every captured frame.
  public func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    let img = self.imageFromSampleBuffer(sampleBuffer: sampleBuffer)
    runMachineLearning(img: img)
  }

  // Builds a Vision request around the current model and performs it on a
  // background queue so classification never blocks the main thread.
  func runMachineLearning(img: CIImage) {
    guard let model = self.model else {
      return
    }

    let request = VNCoreMLRequest(model: model, completionHandler: { [weak self] request, error in
      self?.processClassifications(for: request, error: error)
    })
    request.imageCropAndScaleOption = .centerCrop
    let orientation = CGImagePropertyOrientation.up

    DispatchQueue.global(qos: .userInitiated).async {
      let handler = VNImageRequestHandler(ciImage: img, orientation: orientation)
      do {
        try handler.perform([request])
      } catch {
        print("Failed to perform classification.\n\(error.localizedDescription)")
      }
    }
  }

  // Delivers classification results to the JavaScript side via onClassification.
  func processClassifications(for request: VNRequest, error: Error?) {
    DispatchQueue.main.async {
      guard let classifications = request.results as? [VNClassificationObservation] else {
        print("Unable to classify image")
        if let error = error {
          print(error.localizedDescription)
        }
        return
      }

      // Map each observation to a dictionary the JavaScript side can consume.
      var classificationArray = [Dictionary<String, Any>]()
      classifications.forEach { classification in
        classificationArray.append(["identifier": classification.identifier, "confidence": classification.confidence])
      }
      self.onClassification?(["classifications": classificationArray])
    }
  }

  func imageFromSampleBuffer(sampleBuffer: CMSampleBuffer) -> CIImage {
    let imageBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)!
    return CIImage(cvPixelBuffer: imageBuffer)
  }

  override public func layoutSubviews() {
    super.layoutSubviews()

    // Host view for the camera preview layer, sized to match this view.
    let view = UIView(frame: CGRect(x: 0, y: 0, width: self.frame.width, height: self.frame.height))

    guard let captureDevice = AVCaptureDevice.default(for: .video) else {
      return
    }

    do {
      let input = try AVCaptureDeviceInput(device: captureDevice)
      self.captureSession = AVCaptureSession()
      self.captureSession?.addInput(input)

      videoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession!)
      videoPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
      videoPreviewLayer?.frame = view.layer.bounds
      view.layer.addSublayer(videoPreviewLayer!)
      self.addSubview(view)

      // Deliver sample buffers to this view on a dedicated background queue.
      let videoDataOutput = AVCaptureVideoDataOutput()
      let queue = DispatchQueue(label: "xyz.jigswaw.ml.queue")
      videoDataOutput.setSampleBufferDelegate(self, queue: queue)
      guard self.captureSession?.canAddOutput(videoDataOutput) == true else {
        fatalError()
      }
      self.captureSession?.addOutput(videoDataOutput)
      self.captureSession?.startRunning()
    } catch {
      print(error)
    }
  }

  // Loads the named Core ML model from the app bundle and wraps it for Vision.
  @objc(setModelFile:) public func setModelFile(modelFile: String) {
    print("Setting model file to: " + modelFile)
    guard let path = Bundle.main.url(forResource: modelFile, withExtension: "mlmodelc") else {
      print("Could not find \(modelFile).mlmodelc in the main bundle")
      return
    }
    do {
      let modelUrl = try MLModel.compileModel(at: path)
      let model = try MLModel(contentsOf: modelUrl)
      self.model = try VNCoreMLModel(for: model)
    } catch {
      print("Error loading model: \(error.localizedDescription)")
    }
  }

  @objc(setOnClassification:) public func setOnClassification(onClassification: @escaping RCTBubblingEventBlock) {
    self.onClassification = onClassification
  }

}
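
// For reference, React Native mounts this view through an RCTViewManager subclass
// that exposes the modelFile and onClassification props. A minimal sketch of such
// a manager (class name and details assumed, not taken from this file) would be:
//
//   @objc(CoreMLImageManager)
//   class CoreMLImageManager: RCTViewManager {
//     override static func requiresMainQueueSetup() -> Bool {
//       return true
//     }
//     override func view() -> UIView! {
//       return CoreMLImage()
//     }
//   }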