Created
January 4, 2016 08:35
-
-
Save Katafalkas/5804f7d3a8aeda9450e3 to your computer and use it in GitHub Desktop.
testVC
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import UIKit | |
import AVFoundation | |
import GLKit | |
import OpenGLES | |
/// Live-camera OCR demo: renders the capture feed through a GLKView/CIContext
/// pipeline and runs Tesseract (G8Tesseract) on sampled frames, showing the
/// recognized text in an overlay label.
class ViewController: UIViewController, G8TesseractDelegate, AVCaptureVideoDataOutputSampleBufferDelegate {

    // MARK: - Properties

    /// Shared OCR engine. G8Tesseract is not thread-safe, so every
    /// recognition is funneled through `recognitionQueue`.
    var tesseract: G8Tesseract!
    /// Overlay label showing the most recently recognized text.
    var label: UILabel!
    var imgV: UIImageView!
    /// Capture session. Only created by camera(), which only runs when camera
    /// access is authorized — so this can legitimately be nil.
    var session: AVCaptureSession!
    var previewLayer: AVCaptureVideoPreviewLayer!
    var glContext: EAGLContext!
    var glView: GLKView!
    var ciContext: CIContext!
    var videoOutput: AVCaptureVideoDataOutput!
    var stillCameraOutput: AVCaptureStillImageOutput!

    /// Serial queue that owns all Tesseract work, so concurrent capture
    /// callbacks cannot race on the single shared engine instance.
    private let recognitionQueue = dispatch_queue_create("com.example.camera.image_reco", DISPATCH_QUEUE_SERIAL)

    // MARK: - Lifecycle

    override func viewWillAppear(animated: Bool) {
        super.viewWillAppear(animated)

        // camera() only runs when access is authorized, so `session` may still
        // be nil here (access denied/restricted, or the permission prompt has
        // not been answered yet). Force-unwrapping it here would crash.
        if let session = self.session {
            let sessionQueue = dispatch_queue_create("com.example.camera.capture_session", DISPATCH_QUEUE_SERIAL)
            dispatch_async(sessionQueue) {
                // -startRunning blocks until the session is up; keep it off
                // the main thread.
                session.startRunning()
            }
        }

        // Prime the pipeline with the bundled test image. The resource is part
        // of the app bundle, so a missing "s3" asset is a programmer error.
        updateRecognizedText(fromImage: UIImage(named: "s3")!)
    }

    override func viewDidLoad() {
        super.viewDidLoad()

        // Run one synchronous recognition so tesseract.progress reaches 100
        // before the first updateRecognizedText(fromImage:) call.
        tesseract = G8Tesseract(language: "eng")
        tesseract.delegate = self
        tesseract.image = UIImage(named: "s3")
        tesseract.recognize()

        // Only build the capture pipeline once the user has granted access.
        let authorizationStatus = AVCaptureDevice.authorizationStatusForMediaType(AVMediaTypeVideo)
        switch authorizationStatus {
        case .NotDetermined:
            print("NotDetermined")
            AVCaptureDevice.requestAccessForMediaType(AVMediaTypeVideo) { (granted: Bool) -> Void in
                if granted {
                    print("granted")
                    self.camera()
                } else {
                    print("not granted")
                }
            }
        case .Authorized:
            print("AUTH")
            camera()
        case .Denied, .Restricted:
            print("DEN, REST")
        }

        // Recognized-text overlay pinned to the bottom edge, full width.
        label = UILabel()
        label.translatesAutoresizingMaskIntoConstraints = false
        label.text = "FOoBAr"
        label.textColor = UIColor.whiteColor()
        label.textAlignment = .Center
        label.numberOfLines = 0
        self.view.addSubview(label)
        self.view.addConstraints(NSLayoutConstraint.constraintsWithVisualFormat("H:|[label]|", options: [], metrics: nil, views: ["label": label]))
        self.view.addConstraints(NSLayoutConstraint.constraintsWithVisualFormat("V:[label]-20-|", options: [], metrics: nil, views: ["label": label]))
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    // MARK: - Capture pipeline

    /// Builds the AVCaptureSession (back camera in, video-data out) and the
    /// GLKView/CIContext used to render frames. Must only be called once
    /// camera access has been authorized.
    func camera() {
        session = AVCaptureSession()
        session.sessionPreset = AVCaptureSessionPresetPhoto

        // Pick out the back camera from the available video devices.
        var backCameraDevice: AVCaptureDevice?
        var frontCameraDevice: AVCaptureDevice?
        let availableCameraDevices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
        for device in (availableCameraDevices as? [AVCaptureDevice] ?? []) {
            print("Device: \(device.position.rawValue)")
            if device.position == .Back {
                backCameraDevice = device
            } else if device.position == .Front {
                frontCameraDevice = device
            }
        }
        // frontCameraDevice is collected but unused today; kept for parity
        // with the original (e.g. a future camera-flip feature).
        _ = frontCameraDevice

        do {
            if let backCamera = backCameraDevice {
                let backCameraInput = try AVCaptureDeviceInput(device: backCamera)
                if self.session.canAddInput(backCameraInput) {
                    print("meg")
                    self.session.addInput(backCameraInput)
                }
            } else {
                print("MEH1")
            }
        } catch {
            // Best-effort: the preview simply stays empty if the input fails.
            print("MEH")
        }

        // Render frames ourselves via GLKit instead of AVCaptureVideoPreviewLayer
        // so each CIImage can also be handed to the OCR engine.
        glContext = EAGLContext(API: .OpenGLES2)
        glView = GLKView(frame: self.view.frame, context: glContext)
        // Capture buffers arrive landscape; rotate the view 90° to display
        // them portrait.
        glView.transform = CGAffineTransformMakeRotation(CGFloat(M_PI_2))
        glView.frame = self.view.bounds
        self.view.addSubview(glView)
        ciContext = CIContext(EAGLContext: glContext)

        videoOutput = AVCaptureVideoDataOutput()
        videoOutput.setSampleBufferDelegate(self, queue: dispatch_queue_create("sample buffer delegate", DISPATCH_QUEUE_SERIAL))
        if session.canAddOutput(self.videoOutput) {
            session.addOutput(self.videoOutput)
        }
    }

    // MARK: - OCR

    /// Runs a recognition pass on `image` if the engine is idle, then pushes
    /// the result to the overlay label. Safe to call from any thread.
    func updateRecognizedText(fromImage image: UIImage) {
        print("updateRecognizedText: \(tesseract.progress)")
        // G8Tesseract reports progress as 0–100; 100 means the previous pass
        // finished, so the engine is free for a new image. Anything else means
        // a recognition is still in flight — drop this frame.
        guard tesseract.progress == 100 else { return }
        print("ZERO")
        tesseract.image = image
        tesseract.recognize()  // synchronous; fine off the main thread
        let text = tesseract.recognizedText
        // UIKit is main-thread-only; the original mutated label.text directly
        // from a background queue.
        dispatch_async(dispatch_get_main_queue()) {
            self.label.text = text
        }
    }

    // MARK: - AVCaptureVideoDataOutputSampleBufferDelegate

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        // A sample buffer can arrive without an image buffer (e.g. dropped
        // frame metadata); the original force-unwrap would crash on that.
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        let image = CIImage(CVPixelBuffer: pixelBuffer)

        // Draw the frame. This delegate runs on the serial sample-buffer
        // queue, so the GL context switch is not racing itself.
        if glContext != EAGLContext.currentContext() {
            EAGLContext.setCurrentContext(glContext)
        }
        glView.bindDrawable()
        ciContext.drawImage(image, inRect: image.extent, fromRect: image.extent)
        glView.display()

        // OCR on a dedicated *serial* queue: the shared G8Tesseract instance
        // is not thread-safe, so concurrent global-queue dispatches (as in the
        // original) could race image/recognize/recognizedText accesses.
        // NOTE(review): UIImage(CIImage:) has no bitmap backing; confirm
        // G8Tesseract can consume it, or render via CIContext to a CGImage.
        dispatch_async(recognitionQueue) { () -> Void in
            self.updateRecognizedText(fromImage: UIImage(CIImage: image))
        }
    }

    // MARK: - G8TesseractDelegate

    func progressImageRecognitionForTesseract(tesseract: G8Tesseract!) {
        print("progress: \(tesseract.progress)")
    }

    func shouldCancelImageRecognitionForTesseract(tesseract: G8Tesseract!) -> Bool {
        return false  // return true if you need to interrupt tesseract before it finishes
    }
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment