I'm 陶醉早晨, a blogger at 靠谱客. This article shows how to grab real-time preview frames from the camera on iOS; I'm sharing it here in the hope that it makes a useful reference.

Objective-C implementation
The AVController.h header file:

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreGraphics/CoreGraphics.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CoreMedia.h>

@interface AVController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate>
{
    AVCaptureSession *_captureSession;
    UIImageView *_imageView;
    CALayer *_customLayer;
    AVCaptureVideoPreviewLayer *_prevLayer;
}

@property (nonatomic, retain) AVCaptureSession *captureSession;
@property (nonatomic, retain) UIImageView *imageView;
@property (nonatomic, retain) CALayer *customLayer;
@property (nonatomic, retain) AVCaptureVideoPreviewLayer *prevLayer;

@end

The AVController.m implementation:

#import "AVController.h"

@implementation AVController

//@synthesize captureSession = _captureSession;
//@synthesize imageView = _imageView;
//@synthesize customLayer = _customLayer;
//@synthesize prevLayer = _prevLayer;

- (void)viewDidLoad
{
    [super viewDidLoad];
    [self initCapture];
}

/**
 * Initialize the capture session: camera input, a BGRA video-data output,
 * and three preview targets (a plain CALayer, a UIImageView, and the
 * standard AVCaptureVideoPreviewLayer).
 */
- (void)initCapture
{
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:nil];

    AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
    captureOutput.alwaysDiscardsLateVideoFrames = YES;
    // captureOutput.minFrameDuration = CMTimeMake(1, 10);

    // Deliver sample buffers on a background serial queue.
    dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
    [captureOutput setSampleBufferDelegate:self queue:queue];

    // Request BGRA frames so they can be drawn directly with Core Graphics.
    NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
    NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [captureOutput setVideoSettings:videoSettings];

    self.captureSession = [[AVCaptureSession alloc] init];
    [self.captureSession addInput:captureInput];
    [self.captureSession addOutput:captureOutput];
    [self.captureSession startRunning];

    // A plain CALayer whose contents are set frame by frame in the delegate callback.
    self.customLayer = [CALayer layer];
    CGRect frame = self.view.bounds;
    frame.origin.y = 64;
    frame.size.height = frame.size.height - 64;
    self.customLayer.frame = frame;
    self.customLayer.transform = CATransform3DRotate(CATransform3DIdentity, M_PI / 2.0f, 0, 0, 1);
    self.customLayer.contentsGravity = kCAGravityResizeAspectFill;
    [self.view.layer addSublayer:self.customLayer];

    // A UIImageView updated with a UIImage built from each frame.
    self.imageView = [[UIImageView alloc] init];
    self.imageView.frame = CGRectMake(0, 64, 100, 100);
    [self.view addSubview:self.imageView];

    // The standard preview layer, shown alongside for comparison.
    self.prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
    self.prevLayer.frame = CGRectMake(100, 64, 100, 100);
    self.prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:self.prevLayer];

    UIButton *back = [[UIButton alloc] init];
    [back setTitle:@"Back" forState:UIControlStateNormal];
    [back setTitleColor:[UIColor redColor] forState:UIControlStateNormal];
    [back sizeToFit];
    frame = back.frame;
    frame.origin.y = 25;
    back.frame = frame;
    [self.view addSubview:back];
    [back addTarget:self action:@selector(back:) forControlEvents:UIControlEventTouchUpInside];
}

- (void)back:(id)sender
{
    [self dismissViewControllerAnimated:YES completion:nil];
}

// Called on cameraQueue for every captured frame.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Wrap the BGRA pixel data in a bitmap context and snapshot it as a CGImage.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace,
                                                    kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);

    // Hand the CGImage to the custom layer on the main thread.
    // http://www.cnblogs.com/zzltjnh/p/3885012.html
    id object = (__bridge id)newImage;
    [self.customLayer performSelectorOnMainThread:@selector(setContents:) withObject:object waitUntilDone:YES];

    UIImage *image = [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];
    // UIImage retains the CGImage, so the local reference can be released.
    CGImageRelease(newImage);
    [self.imageView performSelectorOnMainThread:@selector(setImage:) withObject:image waitUntilDone:YES];

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}

// Note: viewDidUnload is deprecated and never called on iOS 6 and later;
// it is kept here as in the original listing.
- (void)viewDidUnload
{
    [self.captureSession stopRunning];
    self.imageView = nil;
    self.customLayer = nil;
    self.prevLayer = nil;
}

- (void)dealloc
{
    // [self.captureSession release];
}

@end
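Since the Back button dismisses the controller with dismissViewControllerAnimated:, the controller is meant to be presented modally. Below is a minimal usage sketch, not part of the original article: the showCameraPreview method name and the presenting view controller are placeholders, and it assumes the app's Info.plist contains an NSCameraUsageDescription entry (required for camera access on iOS 10 and later).

#import "AVController.h"

// Somewhere in an existing view controller (hypothetical method name).
// Assumes Info.plist declares NSCameraUsageDescription (required on iOS 10+).
- (void)showCameraPreview
{
    AVController *avController = [[AVController alloc] init];
    avController.modalPresentationStyle = UIModalPresentationFullScreen;
    [self presentViewController:avController animated:YES completion:nil];
}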

Swift implementation (Swift 2-era syntax):

import UIKit
import AVFoundation

class MyAVController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    var captureSession: AVCaptureSession!
    var customLayer: CALayer!
    var imageView: UIImageView!
    var previewLayer: AVCaptureVideoPreviewLayer!

    override func viewDidLoad() {
        super.viewDidLoad()

        // Camera input plus a BGRA video-data output that delivers frames on a background queue.
        let device = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
        let captureInput = try? AVCaptureDeviceInput(device: device[0] as! AVCaptureDevice)

        let captureOutput = AVCaptureVideoDataOutput()
        captureOutput.alwaysDiscardsLateVideoFrames = true
        let queue = dispatch_queue_create("cameraQueue", nil)
        captureOutput.setSampleBufferDelegate(self, queue: queue)

        let key = kCVPixelBufferPixelFormatTypeKey as NSString
        let value = NSNumber(unsignedInt: kCVPixelFormatType_32BGRA)
        let videoSettings = [key: value]
        captureOutput.videoSettings = videoSettings

        captureSession = AVCaptureSession()
        captureSession.addInput(captureInput!)   // a real app should handle a failed (nil) input
        captureSession.addOutput(captureOutput)
        captureSession.startRunning()

        // A plain CALayer whose contents are replaced frame by frame in the delegate callback.
        customLayer = CALayer()
        var frame = self.view.bounds
        frame.origin.y = 64
        frame.size.height = frame.size.height - 64
        customLayer.frame = frame
        customLayer.transform = CATransform3DRotate(CATransform3DIdentity, CGFloat(M_PI) / 2.0, 0, 0, 1)
        customLayer.contentsGravity = kCAGravityResizeAspectFill
        self.view.layer.addSublayer(customLayer)

        // A UIImageView updated with a UIImage built from each frame.
        imageView = UIImageView()
        imageView.frame = CGRectMake(0, 64, 100, 100)
        self.view.addSubview(imageView)

        // The standard preview layer, shown alongside for comparison.
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = CGRectMake(100, 64, 100, 100)
        previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
        self.view.layer.addSublayer(previewLayer)

        let btn = UIButton()
        btn.setTitle("Back", forState: .Normal)
        btn.setTitleColor(UIColor.redColor(), forState: .Normal)
        btn.sizeToFit()
        frame = btn.frame
        frame.origin.y = 25
        btn.frame = frame
        self.view.addSubview(btn)
        btn.addTarget(self, action: "back:", forControlEvents: .TouchUpInside)
    }

    // Called on cameraQueue for every captured frame.
    func captureOutput(captureOutput: AVCaptureOutput!,
                       didOutputSampleBuffer sampleBuffer: CMSampleBuffer!,
                       fromConnection connection: AVCaptureConnection!) {
        let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)!
        CVPixelBufferLockBaseAddress(imageBuffer, 0)

        let baseAddress = CVPixelBufferGetBaseAddress(imageBuffer)
        let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer)
        let width = CVPixelBufferGetWidth(imageBuffer)
        let height = CVPixelBufferGetHeight(imageBuffer)

        // Wrap the BGRA pixel data in a bitmap context and snapshot it as a CGImage.
        let colorSpace = CGColorSpaceCreateDeviceRGB()
        // let bmpInfo = CGBitmapInfo(rawValue: CGImageAlphaInfo.PremultipliedFirst.rawValue | CGBitmapInfo.ByteOrder32Little.rawValue)
        let newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace,
                                               CGImageAlphaInfo.PremultipliedFirst.rawValue | CGBitmapInfo.ByteOrder32Little.rawValue)
        let newImage = CGBitmapContextCreateImage(newContext)

        // Hand the results to the main thread.
        customLayer.performSelectorOnMainThread("setContents:", withObject: newImage, waitUntilDone: true)
        let image = UIImage(CGImage: newImage!, scale: 1.0, orientation: UIImageOrientation.Right)
        imageView.performSelectorOnMainThread("setImage:", withObject: image, waitUntilDone: true)

        CVPixelBufferUnlockBaseAddress(imageBuffer, 0)
    }

    func back(sender: AnyObject) {
        self.dismissViewControllerAnimated(true, completion: nil)
    }

    override func viewWillDisappear(animated: Bool) {
        super.viewWillDisappear(animated)
        captureSession.stopRunning()
    }
}
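The Swift controller is presented the same way. A minimal sketch in the same Swift 2-era syntax as the listing above (showCameraPreview and the presenting view controller are placeholders; NSCameraUsageDescription must still be declared in Info.plist for iOS 10 and later):

// In any presenting view controller (hypothetical method name).
// Assumes Info.plist declares NSCameraUsageDescription (required on iOS 10+).
func showCameraPreview() {
    let controller = MyAVController()
    controller.modalPresentationStyle = .FullScreen
    self.presentViewController(controller, animated: true, completion: nil)
}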

Finally

That is all of 陶醉早晨's recently collected and organized material on capturing real-time camera preview images in iOS. For more iOS-related content, please search 靠谱客 for other articles.
