I'm 淡淡楼房, a blogger at 靠谱客. This article walks through building a custom camera with an overlay mask in Swift; I'm sharing it here in the hope that it can serve as a reference.

Here is the final effect:
[Screenshot: ID-card capture screen with a darkened overlay and a clear capture frame in the middle]

There is not much to say about the custom camera itself: it is built on AVFoundation, and basic setup tutorials are easy to find online.

Let's go straight to the overlay mask.

// Draw the mask layer

func drawCoverView() {
    // A full-screen, semi-transparent black view that will host the mask
    let view = UIView(frame: self.view.bounds)
    view.backgroundColor = .black
    view.alpha = 0.5
    self.view.addSubview(view)
    // Path covering the whole screen
    let bpath = UIBezierPath(roundedRect: self.view.bounds, cornerRadius: 0)
    // Path of the capture frame, centered on screen
    let bpath2 = UIBezierPath(roundedRect: CGRect(x: horizontally(viewWidth: photoWidth),
                                                  y: verticalCentralization(viewHeight: photoHeigth),
                                                  width: photoWidth,
                                                  height: photoHeigth),
                              cornerRadius: 0)
    // Appending the reversed inner path punches a transparent hole in the mask
    bpath.append(bpath2.reversing())
    let shapeLayer = CAShapeLayer()
    shapeLayer.path = bpath.cgPath
    view.layer.mask = shapeLayer
}

Key points
Create a full-screen view.
bpath first traces a path covering the whole screen for the dark mask.
bpath2 is appended with reversing(), which flips its winding direction so the framed rectangle is cut out of the mask as a transparent window.
This is the simplest, most direct way to do it. A sketch of the centering helpers the code above relies on follows below.
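The snippet relies on a few project-level helpers that the article never defines: K_Screen_width / K_Screen_height for the screen size, and horizontally(viewWidth:) / verticalCentralization(viewHeight:) for centering. Their real implementations aren't shown, but a minimal sketch, assuming they simply center a rect of the given size on screen, could be:

import UIKit

// Assumed definitions -- the original project may define these differently.
let K_Screen_width = Int(UIScreen.main.bounds.width)
let K_Screen_height = Int(UIScreen.main.bounds.height)

// X origin that horizontally centers content of the given width
func horizontally(viewWidth: Int) -> Int {
    return (K_Screen_width - viewWidth) / 2
}

// Y origin that vertically centers content of the given height
func verticalCentralization(viewHeight: Int) -> Int {
    return (K_Screen_height - viewHeight) / 2
}

The full view controller later in the article also uses barHeight, bottomY, ga_widthForComment(str:fontSize:) and ga_heightForComment(str:fontSize:width:), which are likewise project helpers (presumably the status-bar height, a bottom Y coordinate, and text-measuring utilities) that are not shown here.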

import UIKit
import AVFoundation

class ScannerVC: UIViewController {

    var back_but: UIButton?
    var photoBut: UIButton?
    var lightBut: UIButton?
    // Called with the cropped image once the shutter fires
    var callback: ((UIImage) -> Void)?
    // Capture device: usually the front camera, rear camera, or microphone (audio input)
    var device: AVCaptureDevice?
    // AVCaptureDeviceInput represents the input device and is initialized with an AVCaptureDevice
    var input: AVCaptureDeviceInput?
    // Output objects used once the camera starts capturing
    var output: AVCaptureMetadataOutput?
    var ImageOutPut: AVCaptureStillImageOutput?
    // The session ties input and output together and drives the capture device (camera)
    var session: AVCaptureSession?
    // Preview layer that displays the captured frames in real time
    var previewLayer: AVCaptureVideoPreviewLayer?
    var canCa = false
    var imageView: UIImageView?
    var image: UIImage?
    var maskLayer: CAShapeLayer?            // semi-transparent black mask
    var effectiveRectLayer: CAShapeLayer?   // border of the effective capture area
    var photoWidth = K_Screen_width - 40
    var photoHeigth = Int(Double(K_Screen_width - 40) / 1.6)
    var focusView: UIView?                  // focus indicator
    var isLightOn = false

    override func viewDidLoad() {
        super.viewDidLoad()
        self.view.backgroundColor = .black
        drawCoverView()
        createView()
        canCa = canUserCamear()
        if canCa {
            customUI()
            customCamera()
        }
    }

    // Draw the mask layer
    func drawCoverView() {
        let view = UIView(frame: self.view.bounds)
        view.backgroundColor = .black
        view.alpha = 0.5
        self.view.addSubview(view)
        let bpath = UIBezierPath(roundedRect: self.view.bounds, cornerRadius: 0)
        let bpath2 = UIBezierPath(roundedRect: CGRect(x: horizontally(viewWidth: photoWidth),
                                                      y: verticalCentralization(viewHeight: photoHeigth),
                                                      width: photoWidth,
                                                      height: photoHeigth),
                                  cornerRadius: 0)
        bpath.append(bpath2.reversing())
        let shapeLayer = CAShapeLayer()
        shapeLayer.path = bpath.cgPath
        view.layer.mask = shapeLayer
    }

    // Set up tap-to-focus
    func customUI() {
        focusView = UIView(frame: CGRect(x: 0, y: 0, width: 70, height: 70))
        focusView?.layer.borderWidth = 1.0
        focusView?.layer.borderColor = UIColor.green.cgColor
        focusView?.backgroundColor = .clear
        focusView?.isHidden = true
        self.view.addSubview(focusView!)
        // Tap gesture for focusing
        let tapGesture = UITapGestureRecognizer(target: self, action: #selector(focusGesture(gesture:)))
        self.view.addGestureRecognizer(tapGesture)
    }

    @objc func focusGesture(gesture: UITapGestureRecognizer) {
        let point = gesture.location(in: gesture.view)
        focusAtPoint(point: point)
    }

    func focusAtPoint(point: CGPoint) {
        let size = self.view.bounds.size
        let focusPorint = CGPoint(x: point.y / size.height, y: 1 - point.x / size.width)
        do {
            try device?.lockForConfiguration()
            // Focus
            if (self.device?.isFocusModeSupported(AVCaptureDevice.FocusMode.autoFocus))! {
                self.device?.focusPointOfInterest = focusPorint
                self.device?.focusMode = AVCaptureDevice.FocusMode.autoFocus
            }
            // Exposure
            if (self.device?.isExposureModeSupported(AVCaptureDevice.ExposureMode.autoExpose))! {
                self.device?.exposurePointOfInterest = focusPorint
                self.device?.exposureMode = AVCaptureDevice.ExposureMode.autoExpose
            }
            self.device?.unlockForConfiguration()
            // Animate the focus indicator
            focusView?.center = point
            focusView?.isHidden = false
            UIView.animate(withDuration: 0.3, animations: {
                self.focusView?.transform = CGAffineTransform(scaleX: 1.25, y: 1.25)
            }) { (finished) in
                UIView.animate(withDuration: 0.5, animations: {
                    self.focusView?.transform = CGAffineTransform.identity
                }, completion: { (finished) in
                    self.focusView?.isHidden = true
                })
            }
        } catch {
            return
        }
    }

    // Camera initialization
    func customCamera() {
        maskLayer = CAShapeLayer()
        self.view.backgroundColor = .white
        // AVMediaType.video means the device captures video; the rear camera is used by default
        self.device = AVCaptureDevice.default(for: AVMediaType.video)
        // Initialize the input with the device
        do {
            self.input = try AVCaptureDeviceInput(device: self.device!)
        } catch {
            print(error)
            return
        }
        // Create the output objects
        self.output = AVCaptureMetadataOutput()
        self.ImageOutPut = AVCaptureStillImageOutput()
        // Create the session, which combines input and output
        self.session = AVCaptureSession()
        if (self.session?.canSetSessionPreset(AVCaptureSession.Preset.hd1920x1080))! {
            self.session?.sessionPreset = AVCaptureSession.Preset.hd1920x1080
        }
        if self.session!.canAddInput(self.input!) {
            self.session!.addInput(self.input!)
        }
        if self.session!.canAddOutput(self.ImageOutPut!) {
            self.session!.addOutput(self.ImageOutPut!)
        }
        // Initialize the preview layer with the session: the session drives the input, the layer renders the frames
        self.previewLayer = AVCaptureVideoPreviewLayer(session: session!)
        self.previewLayer?.frame = CGRect(x: 0, y: 0, width: K_Screen_width, height: K_Screen_height)
        self.previewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
        self.view.layer.insertSublayer(self.previewLayer!, at: 0)
        // Start running
        self.session?.startRunning()
        // Flash and white balance configuration
        do {
            try self.device?.lockForConfiguration()
            if self.device!.isFlashModeSupported(AVCaptureDevice.FlashMode.auto) {
                self.device?.flashMode = AVCaptureDevice.FlashMode.auto
            }
            // Automatic white balance
            if self.device!.isWhiteBalanceModeSupported(AVCaptureDevice.WhiteBalanceMode.autoWhiteBalance) {
                self.device?.whiteBalanceMode = AVCaptureDevice.WhiteBalanceMode.autoWhiteBalance
            }
            self.device?.unlockForConfiguration()
        } catch {
            print(error)
        }
    }

    func createView() {
        let topHight = Int(barHeight) + Int((self.navigationController?.navigationBar.frame.size.height)!)

        // Back button
        back_but = UIButton(type: .custom)
        let back = UIImage(named: "white_black")
        back_but?.frame = CGRect(x: 20, y: CGFloat(topHight / 2), width: (back?.size.width)!, height: (back?.size.height)!)
        back_but?.addTarget(self, action: #selector(backPage), for: .touchUpInside)
        back_but?.setBackgroundImage(back, for: .normal)

        // Shutter button
        photoBut = UIButton()
        photoBut?.addTarget(self, action: #selector(shutterCamera), for: .touchUpInside)
        photoBut?.setBackgroundImage(UIImage(named: "startBtn"), for: .normal)
        photoBut?.frame = CGRect(x: horizontally(viewWidth: 70), y: bottomY - 70, width: 70, height: 70)
        self.view.addSubview(photoBut!)
        self.view.addSubview(back_but!)

        // Hint label above the capture frame
        // ("Place the front of your ID card inside the frame, and mind the lighting")
        let labele = UILabel()
        let width1 = ga_widthForComment(str: "请将身份证正面置入框中,注意光线", fontSize: 16)
        let height1 = ga_heightForComment(str: "请将身份证正面置入框中,注意光线", fontSize: 16, width: width1)
        labele.frame = CGRect(x: horizontally(viewWidth: Int(width1)),
                              y: Int(K_Screen_height / 2) - (photoHeigth / 2) - Int(height1 + 10),
                              width: Int(width1),
                              height: Int(height1))
        labele.text = "请将身份证正面置入框中,注意光线"
        labele.textColor = .white
        labele.font = UIFont.systemFont(ofSize: 16)
        self.view.addSubview(labele)

        // Flash toggle button ("闪光灯" = flash)
        let width2 = ga_widthForComment(str: "闪光灯", fontSize: 16)
        let height2 = ga_heightForComment(str: "闪光灯", fontSize: 16, width: width1)
        lightBut = UIButton(frame: CGRect(x: CGFloat(K_Screen_width - Int(20 + width2)), y: CGFloat(topHight / 2), width: width2, height: height2))
        lightBut?.setTitle("闪光灯", for: .normal)
        lightBut?.setTitleColor(.groupTableViewBackground, for: .normal)
        lightBut?.titleLabel?.font = UIFont.systemFont(ofSize: 16)
        lightBut?.addTarget(self, action: #selector(light), for: .touchUpInside)
        self.view.addSubview(lightBut!)

        // Corner lines of the capture frame -- start
        let view2 = UIView(frame: CGRect(x: 18, y: Int(K_Screen_height / 2) - (photoHeigth / 2) - 4, width: 32, height: 2))
        view2.backgroundColor = .white
        self.view.addSubview(view2)
        let view3 = UIView(frame: CGRect(x: K_Screen_width - 50, y: Int(K_Screen_height / 2) - (photoHeigth / 2) - 4, width: 32, height: 2))
        view3.backgroundColor = .white
        self.view.addSubview(view3)
        let view4 = UIView(frame: CGRect(x: 18, y: Int(K_Screen_height / 2) + (photoHeigth / 2) + 2, width: 32, height: 2))
        view4.backgroundColor = .white
        self.view.addSubview(view4)
        let view5 = UIView(frame: CGRect(x: K_Screen_width - 50, y: Int(K_Screen_height / 2) + (photoHeigth / 2) + 2, width: 32, height: 2))
        view5.backgroundColor = .white
        self.view.addSubview(view5)
        let view6 = UIView(frame: CGRect(x: 16, y: Int(K_Screen_height / 2) - (photoHeigth / 2) - 4, width: 2, height: 32))
        view6.backgroundColor = .white
        self.view.addSubview(view6)
        let view7 = UIView(frame: CGRect(x: K_Screen_width - 18, y: Int(K_Screen_height / 2) - (photoHeigth / 2) - 4, width: 2, height: 32))
        view7.backgroundColor = .white
        self.view.addSubview(view7)
        let view8 = UIView(frame: CGRect(x: 16, y: Int(K_Screen_height / 2) + (photoHeigth / 2) - 28, width: 2, height: 32))
        view8.backgroundColor = .white
        self.view.addSubview(view8)
        let view9 = UIView(frame: CGRect(x: K_Screen_width - 18, y: Int(K_Screen_height / 2) + (photoHeigth / 2) - 28, width: 2, height: 32))
        view9.backgroundColor = .white
        self.view.addSubview(view9)
        // Corner lines of the capture frame -- end
    }

    @objc func backPage() {
        self.navigationController?.popViewController(animated: true)
    }

    // Camera permission
    func canUserCamear() -> Bool {
        let authStatus = AVCaptureDevice.authorizationStatus(for: AVMediaType.video)
        if authStatus == AVAuthorizationStatus.denied {
            // "Please enable camera access" / "Settings - Privacy - Camera"
            let alertController = UIAlertController(title: "请打开相机权限", message: "设置-隐私-相机", preferredStyle: .alert)
            let cancelAction = UIAlertAction(title: "取消", style: .cancel) { (UIAlertAction) in
                self.backPage()
            }
            let okAction = UIAlertAction(title: "确定", style: .default) { (UIAlertAction) in
                let url = URL(string: UIApplication.openSettingsURLString)
                if UIApplication.shared.canOpenURL(url!) {
                    UIApplication.shared.openURL(url!)
                }
            }
            alertController.addAction(cancelAction)
            alertController.addAction(okAction)
            self.present(alertController, animated: true, completion: nil)
            return false
        } else {
            return true
        }
    }

    // Toggle the torch
    @objc func light() {
        do {
            try device?.lockForConfiguration()
            if !isLightOn {
                device?.torchMode = AVCaptureDevice.TorchMode.on
                isLightOn = true
                lightBut?.setTitleColor(.green, for: .normal)
            } else {
                device?.torchMode = AVCaptureDevice.TorchMode.off
                isLightOn = false
                lightBut?.setTitleColor(.groupTableViewBackground, for: .normal)
            }
            device?.unlockForConfiguration()
        } catch {
            return
        }
    }

    // Take the photo and crop it to the capture frame
    @objc func shutterCamera() {
        let videoConnection = self.ImageOutPut?.connection(with: AVMediaType.video)
        videoConnection?.videoOrientation = AVCaptureVideoOrientation.portrait
        if videoConnection == nil {
            return
        }
        self.ImageOutPut?.captureStillImageAsynchronously(from: videoConnection!, completionHandler: { (imageDataSampleBuffer, error) in
            if imageDataSampleBuffer == nil {
                return
            }
            let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageDataSampleBuffer!)
            self.image = UIImage(data: imageData!)
            self.session?.stopRunning()
            // Scale between the captured image and the screen
            let aspectWidth = self.image!.size.width / CGFloat(K_Screen_width)
            let aspectHeight = self.image!.size.height / CGFloat(K_Screen_height)
            // Region of the image that corresponds to the capture frame
            var scaledImageRect = CGRect.zero
            scaledImageRect.size.width = CGFloat(self.photoWidth) * CGFloat(aspectWidth)
            scaledImageRect.size.height = CGFloat(self.photoHeigth) * CGFloat(aspectHeight)
            scaledImageRect.origin.x = CGFloat(horizontally(viewWidth: self.photoWidth)) * CGFloat(aspectWidth)
            scaledImageRect.origin.y = CGFloat(verticalCentralization(viewHeight: self.photoHeigth)) * CGFloat(aspectHeight)
            // Fix the orientation first, then crop to the frame
            let i = self.imageFromImage(image: self.fixOrientation(image: self.image!), rect: scaledImageRect)
            self.imageView = UIImageView(frame: CGRect(x: horizontally(viewWidth: self.photoWidth),
                                                       y: verticalCentralization(viewHeight: self.photoHeigth),
                                                       width: self.photoWidth,
                                                       height: self.photoHeigth))
            self.imageView?.contentMode = UIView.ContentMode.scaleAspectFill
            self.imageView?.layer.masksToBounds = true
            self.imageView?.image = i
            self.callback?(i)
            self.backPage()
        })
    }

    func scaled(to newSize: CGSize, size: CGSize) -> UIImage {
        // Compute the scale
        let aspectWidth = newSize.width / size.width
        let aspectHeight = newSize.height / size.height
        let aspectRatio = max(aspectWidth, aspectHeight)
        // Drawing area
        var scaledImageRect = CGRect.zero
        scaledImageRect.size.width = size.width * aspectRatio
        scaledImageRect.size.height = size.height * aspectRatio
        scaledImageRect.origin.x = 0
        scaledImageRect.origin.y = 0
        // Draw and return the final image (scale 0.0 uses the device scale so the image stays sharp)
        UIGraphicsBeginImageContextWithOptions(newSize, false, 0.0)
        let scaledImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        return scaledImage!
    }

    /**
     * Crop a region out of an image
     * image: the original image
     * rect:  the region to crop
     */
    func imageFromImage(image: UIImage, rect: CGRect) -> UIImage {
        // Get the underlying CGImage
        let sourceImageRef = image.cgImage
        // Crop to the given rect
        let newImageRef = sourceImageRef?.cropping(to: rect)
        let newImage = UIImage(cgImage: newImageRef!)
        return newImage
    }

    // Redraw the image so its pixel data is upright (.up orientation)
    func fixOrientation(image: UIImage) -> UIImage {
        if image.imageOrientation == .up {
            return image
        }
        var transform = CGAffineTransform.identity
        switch image.imageOrientation {
        case .down, .downMirrored:
            transform = transform.translatedBy(x: image.size.width, y: image.size.height)
            transform = transform.rotated(by: .pi)
        case .left, .leftMirrored:
            transform = transform.translatedBy(x: image.size.width, y: 0)
            transform = transform.rotated(by: .pi / 2)
        case .right, .rightMirrored:
            transform = transform.translatedBy(x: 0, y: image.size.height)
            transform = transform.rotated(by: -.pi / 2)
        default:
            break
        }
        switch image.imageOrientation {
        case .upMirrored, .downMirrored:
            transform = transform.translatedBy(x: image.size.width, y: 0)
            transform = transform.scaledBy(x: -1, y: 1)
        case .leftMirrored, .rightMirrored:
            transform = transform.translatedBy(x: image.size.height, y: 0)
            transform = transform.scaledBy(x: -1, y: 1)
        default:
            break
        }
        let ctx = CGContext(data: nil,
                            width: Int(image.size.width),
                            height: Int(image.size.height),
                            bitsPerComponent: image.cgImage!.bitsPerComponent,
                            bytesPerRow: 0,
                            space: image.cgImage!.colorSpace!,
                            bitmapInfo: image.cgImage!.bitmapInfo.rawValue)
        ctx?.concatenate(transform)
        switch image.imageOrientation {
        case .left, .leftMirrored, .right, .rightMirrored:
            ctx?.draw(image.cgImage!, in: CGRect(x: 0, y: 0, width: image.size.height, height: image.size.width))
        default:
            ctx?.draw(image.cgImage!, in: CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height))
        }
        let cgimg: CGImage = (ctx?.makeImage())!
        let img = UIImage(cgImage: cgimg)
        return img
    }
}
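One caveat about the class above: AVCaptureStillImageOutput and jpegStillImageNSDataRepresentation have been deprecated since iOS 10 in favor of AVCapturePhotoOutput. The original code still compiles and runs (with warnings), but if you want to move to the newer API, a minimal sketch of the replacement capture path might look like the following. PhotoCaptureHelper and its method names are my own illustration, not part of the article's project.

import UIKit
import AVFoundation

// Hypothetical modernization sketch: AVCapturePhotoOutput replaces AVCaptureStillImageOutput.
final class PhotoCaptureHelper: NSObject, AVCapturePhotoCaptureDelegate {
    private let output = AVCapturePhotoOutput()
    private var completion: ((UIImage?) -> Void)?

    // Add the photo output to an already configured session.
    func attach(to session: AVCaptureSession) {
        if session.canAddOutput(output) {
            session.addOutput(output)
        }
    }

    // Capture one JPEG photo and hand the decoded UIImage back.
    func capture(completion: @escaping (UIImage?) -> Void) {
        self.completion = completion
        let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
        output.capturePhoto(with: settings, delegate: self)
    }

    func photoOutput(_ output: AVCapturePhotoOutput,
                     didFinishProcessingPhoto photo: AVCapturePhoto,
                     error: Error?) {
        let image = photo.fileDataRepresentation().flatMap(UIImage.init(data:))
        // Hand the result back on the main queue so UI work is safe.
        DispatchQueue.main.async { self.completion?(image) }
    }
}

The session, preview layer, masking, and cropping logic stay exactly the same; only the output object and the capture callback change.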

Push to this camera page and receive the cropped center image through the callback:

let vc = ScannerVC()
vc.callback = { image in
    print(image)
    self.idImage = image
}
self.navigationController?.pushViewController(vc, animated: true)

One extra pitfall: a photo taken with this custom camera looks fine when assigned to a UIImageView, but its underlying pixel data is actually rotated 90 degrees, so be careful when cropping a region out of it. The fixOrientation method above is what redraws the image into the correct orientation before cropping.
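To make the pitfall concrete: CGImage.cropping(to:) works on the raw pixel buffer and ignores UIImage.imageOrientation, so cropping a freshly captured photo without normalizing it first selects the wrong region. The article's fixOrientation does the normalization with a CGContext; the hypothetical helper below (cropUpright, not from the original project) sketches the same idea using UIGraphicsImageRenderer, which honors the orientation when redrawing:

import UIKit

// Hypothetical helper: redraw the image upright, then crop.
// Assumes the image has scale 1 (true for UIImage(data:) from the camera),
// so point coordinates equal pixel coordinates.
func cropUpright(_ image: UIImage, to rect: CGRect) -> UIImage? {
    let format = UIGraphicsImageRendererFormat()
    format.scale = 1 // keep pixel dimensions equal to image.size
    // draw(in:) applies imageOrientation, so the result's pixels are laid out upright
    let upright = UIGraphicsImageRenderer(size: image.size, format: format).image { _ in
        image.draw(in: CGRect(origin: .zero, size: image.size))
    }
    // Now the crop rect selects the region you actually see on screen
    guard let cropped = upright.cgImage?.cropping(to: rect) else { return nil }
    return UIImage(cgImage: cropped)
}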
If this article helped you, please give it a like.
Source code address

Finally

That's all of the material on the custom masked camera in Swift that 淡淡楼房 has collected and organized recently. For more Swift content, see the other articles on 靠谱客.
