Overview
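This post shows how to grab live preview frames from the iPhone camera with AVFoundation: an AVCaptureSession pulls frames from the camera through an AVCaptureVideoDataOutput, the sample-buffer delegate converts each frame into a CGImage/UIImage, and the result is displayed three ways at once (a plain CALayer, a UIImageView, and a standard AVCaptureVideoPreviewLayer). The same view controller is implemented twice, first in Objective-C and then in Swift. Note that on iOS 10 and later the app's Info.plist must contain an NSCameraUsageDescription entry, otherwise the process is terminated the first time the camera is accessed.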
Objective-C implementation
The AVController.h header
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreGraphics/CoreGraphics.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CoreMedia.h>
@interface AVController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate> {
    AVCaptureSession *_captureSession;
    UIImageView *_imageView;
    CALayer *_customLayer;
    AVCaptureVideoPreviewLayer *_prevLayer;
}

@property (nonatomic, retain) AVCaptureSession *captureSession;
@property (nonatomic, retain) UIImageView *imageView;
@property (nonatomic, retain) CALayer *customLayer;
@property (nonatomic, retain) AVCaptureVideoPreviewLayer *prevLayer;

@end
The AVController.m implementation
@implementation AVController

// The properties are auto-synthesized by the compiler; explicit @synthesize lines are not needed.

- (void)viewDidLoad
{
    [super viewDidLoad];
    [self initCapture];
}
/**
 * Set up the capture session and the three preview surfaces
 * (a plain CALayer, a UIImageView, and an AVCaptureVideoPreviewLayer).
 */
- (void)initCapture {
    // Default video capture device (the back camera) and its input.
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:nil];

    // Video data output that delivers frames to the delegate callback below.
    AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
    captureOutput.alwaysDiscardsLateVideoFrames = YES;
    // minFrameDuration is deprecated; cap the frame rate on the AVCaptureDevice instead.
    // captureOutput.minFrameDuration = CMTimeMake(1, 10);
    dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
    [captureOutput setSampleBufferDelegate:self queue:queue];

    // Ask for BGRA frames so each buffer can be wrapped in a CGBitmapContext directly.
    NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
    NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [captureOutput setVideoSettings:videoSettings];

    self.captureSession = [[AVCaptureSession alloc] init];
    [self.captureSession addInput:captureInput];
    [self.captureSession addOutput:captureOutput];
    [self.captureSession startRunning];

    // 1) Plain CALayer whose contents are set to each frame's CGImage.
    self.customLayer = [CALayer layer];
    CGRect frame = self.view.bounds;
    frame.origin.y = 64;                        // below the top bars
    frame.size.height = frame.size.height - 64;
    self.customLayer.frame = frame;
    // Frames arrive in landscape; rotate the layer 90° so they display upright.
    self.customLayer.transform = CATransform3DRotate(CATransform3DIdentity, M_PI / 2.0f, 0, 0, 1);
    self.customLayer.contentsGravity = kCAGravityResizeAspectFill;
    [self.view.layer addSublayer:self.customLayer];

    // 2) UIImageView fed with a UIImage built from each frame.
    self.imageView = [[UIImageView alloc] init];
    self.imageView.frame = CGRectMake(0, 64, 100, 100);
    [self.view addSubview:self.imageView];

    // 3) Standard AVCaptureVideoPreviewLayer for comparison.
    self.prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
    self.prevLayer.frame = CGRectMake(100, 64, 100, 100);
    self.prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:self.prevLayer];

    // Back button to dismiss the controller.
    UIButton *back = [[UIButton alloc] init];
    [back setTitle:@"Back" forState:UIControlStateNormal];
    [back setTitleColor:[UIColor redColor] forState:UIControlStateNormal];
    [back sizeToFit];
    frame = back.frame;
    frame.origin.y = 25;
    back.frame = frame;
    [self.view addSubview:back];
    [back addTarget:self action:@selector(back:) forControlEvents:UIControlEventTouchUpInside];
}
- (void)back:(id)sender {
    [self dismissViewControllerAnimated:YES completion:nil];
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    // Called on the capture queue for every frame. Lock the pixel buffer and
    // wrap its BGRA bytes in a bitmap context to obtain a CGImage.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace,
                                                    kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);

    // Update the UI on the main thread. waitUntilDone:YES keeps newImage alive
    // until the layer has taken ownership of it, so it can be released afterwards.
    // http://www.cnblogs.com/zzltjnh/p/3885012.html
    id object = (__bridge id)newImage;
    [self.customLayer performSelectorOnMainThread:@selector(setContents:) withObject:object waitUntilDone:YES];
    UIImage *image = [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];
    CGImageRelease(newImage);
    [self.imageView performSelectorOnMainThread:@selector(setImage:) withObject:image waitUntilDone:YES];

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}
// viewDidUnload is never called on iOS 6 and later, so stop the session when the
// view disappears instead; under ARC the manual nil-ing of the properties is unnecessary.
- (void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];
    [self.captureSession stopRunning];
}
- (void)dealloc {
    // Nothing to release explicitly under ARC.
}

@end
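One note on the commented-out minFrameDuration line in initCapture: that property has long been deprecated, and the supported way to cap the capture frame rate is to lock the AVCaptureDevice and set its activeVideoMinFrameDuration. A minimal sketch, written in the same Swift 2-era style as the section that follows; the helper name limitFrameRate and the 10 fps value are our own illustration, not part of the original post:

// Hypothetical helper, not from the original post: cap the capture rate at roughly 10 fps.
func limitFrameRate(device: AVCaptureDevice) {
    do {
        try device.lockForConfiguration()
        device.activeVideoMinFrameDuration = CMTimeMake(1, 10) // at most 10 frames per second
        device.unlockForConfiguration()
    } catch {
        print("could not lock the device for configuration: \(error)")
    }
}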
Swift implementation
import UIKit
import AVFoundation

class MyAVController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    var captureSession: AVCaptureSession!
    var customLayer: CALayer!
    var imageView: UIImageView!
    var previewLayer: AVCaptureVideoPreviewLayer!
    override func viewDidLoad() {
        super.viewDidLoad()

        // Default video capture device and its input.
        let device = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
        guard let captureInput = try? AVCaptureDeviceInput(device: device) else {
            print("unable to open the camera")
            return
        }

        // Video data output delivering BGRA frames to the delegate callback.
        let captureOutput = AVCaptureVideoDataOutput()
        captureOutput.alwaysDiscardsLateVideoFrames = true
        let queue = dispatch_queue_create("cameraQueue", nil)
        captureOutput.setSampleBufferDelegate(self, queue: queue)
        let key = kCVPixelBufferPixelFormatTypeKey as NSString
        let value = NSNumber(unsignedInt: kCVPixelFormatType_32BGRA)
        let videoSettings = [key: value]
        captureOutput.videoSettings = videoSettings

        captureSession = AVCaptureSession()
        captureSession.addInput(captureInput)
        captureSession.addOutput(captureOutput)
        captureSession.startRunning()

        // 1) Plain CALayer whose contents are set to each frame's CGImage.
        customLayer = CALayer()
        var frame = self.view.bounds
        frame.origin.y = 64
        frame.size.height = frame.size.height - 64
        customLayer.frame = frame
        // Frames arrive in landscape; rotate the layer 90° so they display upright.
        customLayer.transform = CATransform3DRotate(CATransform3DIdentity, CGFloat(M_PI) / 2.0, 0, 0, 1)
        customLayer.contentsGravity = kCAGravityResizeAspectFill
        self.view.layer.addSublayer(customLayer)

        // 2) UIImageView fed with a UIImage built from each frame.
        imageView = UIImageView()
        imageView.frame = CGRectMake(0, 64, 100, 100)
        self.view.addSubview(imageView)

        // 3) Standard AVCaptureVideoPreviewLayer for comparison.
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = CGRectMake(100, 64, 100, 100)
        previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
        self.view.layer.addSublayer(previewLayer)

        // Back button to dismiss the controller.
        let btn = UIButton()
        btn.setTitle("Back", forState: .Normal)
        btn.setTitleColor(UIColor.redColor(), forState: .Normal)
        btn.sizeToFit()
        frame = btn.frame
        frame.origin.y = 25
        btn.frame = frame
        self.view.addSubview(btn)
        btn.addTarget(self, action: "back:", forControlEvents: .TouchUpInside)
    }
    // Called on the capture queue for every frame.
    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)!
        CVPixelBufferLockBaseAddress(imageBuffer, 0)
        let baseAddress = CVPixelBufferGetBaseAddress(imageBuffer)
        let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer)
        let width = CVPixelBufferGetWidth(imageBuffer)
        let height = CVPixelBufferGetHeight(imageBuffer)

        // Wrap the BGRA bytes in a bitmap context to obtain a CGImage.
        let colorSpace = CGColorSpaceCreateDeviceRGB()
        let bitmapInfo = CGImageAlphaInfo.PremultipliedFirst.rawValue | CGBitmapInfo.ByteOrder32Little.rawValue
        let newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, bitmapInfo)
        let newImage = CGBitmapContextCreateImage(newContext)

        // Push the results to the UI on the main thread.
        customLayer.performSelectorOnMainThread("setContents:", withObject: newImage, waitUntilDone: true)
        let image = UIImage(CGImage: newImage!, scale: 1.0, orientation: UIImageOrientation.Right)
        imageView.performSelectorOnMainThread("setImage:", withObject: image, waitUntilDone: true)

        CVPixelBufferUnlockBaseAddress(imageBuffer, 0)
    }

    func back(sender: AnyObject) {
        self.dismissViewControllerAnimated(true, completion: nil)
    }
    override func viewWillDisappear(animated: Bool) {
        super.viewWillDisappear(animated)
        captureSession.stopRunning()
    }
}
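The per-frame CGBitmapContextCreate conversion above is the classic approach for BGRA buffers. If all you need is a UIImage, Core Image can do the same conversion more compactly. A rough sketch in the same Swift 2-era syntax as the code above; the helper name imageFromSampleBuffer and the shared CIContext are our own additions, not part of the original post:

// Hypothetical alternative to the bitmap-context conversion in the delegate callback.
let ciContext = CIContext() // reuse one context; creating it per frame is expensive

func imageFromSampleBuffer(sampleBuffer: CMSampleBuffer) -> UIImage? {
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return nil }
    let ciImage = CIImage(CVPixelBuffer: pixelBuffer)
    let cgImage = ciContext.createCGImage(ciImage, fromRect: ciImage.extent)
    // Rotate to portrait the same way the original code does.
    return UIImage(CGImage: cgImage, scale: 1.0, orientation: .Right)
}

This avoids locking the pixel buffer by hand, and the intermediate CIImage could also be run through Core Image filters before display.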
Wrapping up
That is the complete walkthrough of grabbing real-time camera preview images on iOS, as collected and organized by 陶醉早晨; hopefully it helps you solve the same problem in your own projects.