iOS 获取照相机实时预览图片
使用Objective-C实现
AVController.h头文件
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreGraphics/CoreGraphics.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CoreMedia.h>

/// Shows a live camera feed three ways at once:
///  - a full-screen CALayer fed raw CGImages frame by frame,
///  - a small UIImageView fed converted UIImages,
///  - a small system AVCaptureVideoPreviewLayer for comparison.
/// Conforms to AVCaptureVideoDataOutputSampleBufferDelegate to receive
/// every video frame from the session's data output.
@interface AVController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate>

/// The capture pipeline (camera input -> BGRA video data output).
@property (nonatomic, strong) AVCaptureSession *captureSession;

/// Displays each frame converted to a UIImage (100x100, top-left).
@property (nonatomic, strong) UIImageView *imageView;

/// Displays each frame's CGImage directly as layer contents (full screen,
/// rotated because raw frames arrive in landscape orientation).
@property (nonatomic, strong) CALayer *customLayer;

/// System-provided preview layer (100x100, next to the image view).
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *prevLayer;

@end
AVController.m实现
@implementation AVController

#pragma mark - Lifecycle

- (void)viewDidLoad {
    [super viewDidLoad];
    [self initCapture];
}

- (void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];
    // viewDidUnload has not been called since iOS 6, so the original teardown
    // there never ran; stop the camera when the view goes away instead.
    [self.captureSession stopRunning];
}

#pragma mark - Capture setup

/// Builds the capture pipeline and the three on-screen previews.
- (void)initCapture {
    // Camera input (default video device, i.e. the back camera).
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *inputError = nil;
    AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput deviceInputWithDevice:device
                                                                              error:&inputError];
    if (!captureInput) {
        // No camera (e.g. Simulator) or access failure — don't build a dead pipeline.
        NSLog(@"AVController: failed to create camera input: %@", inputError);
        return;
    }

    // Data output delivering 32BGRA frames to a background serial queue.
    AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
    captureOutput.alwaysDiscardsLateVideoFrames = YES;  // drop frames rather than queue them
    dispatch_queue_t queue = dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL);
    [captureOutput setSampleBufferDelegate:self queue:queue];
    captureOutput.videoSettings =
        @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};

    self.captureSession = [[AVCaptureSession alloc] init];
    [self.captureSession addInput:captureInput];
    [self.captureSession addOutput:captureOutput];

    // Full-screen layer fed raw CGImages; rotated 90° because raw frames
    // arrive in landscape orientation.
    self.customLayer = [CALayer layer];
    CGRect frame = self.view.bounds;
    frame.origin.y = 64;                 // leave room for status bar + nav area
    frame.size.height -= 64;
    self.customLayer.frame = frame;
    self.customLayer.transform = CATransform3DRotate(CATransform3DIdentity, M_PI / 2.0f, 0, 0, 1);
    self.customLayer.contentsGravity = kCAGravityResizeAspectFill;
    [self.view.layer addSublayer:self.customLayer];

    // Small image view fed converted UIImages.
    self.imageView = [[UIImageView alloc] init];
    self.imageView.frame = CGRectMake(0, 64, 100, 100);
    [self.view addSubview:self.imageView];

    // System preview layer for comparison.
    self.prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
    self.prevLayer.frame = CGRectMake(100, 64, 100, 100);
    self.prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:self.prevLayer];

    // Dismiss button.
    UIButton *back = [[UIButton alloc] init];
    [back setTitle:@"Back" forState:UIControlStateNormal];
    [back setTitleColor:[UIColor redColor] forState:UIControlStateNormal];
    [back sizeToFit];
    frame = back.frame;
    frame.origin.y = 25;
    back.frame = frame;
    [self.view addSubview:back];
    [back addTarget:self
              action:@selector(back:)
    forControlEvents:UIControlEventTouchUpInside];

    // Start last, once the pipeline is fully configured.
    [self.captureSession startRunning];
}

- (void)back:(id)sender {
    [self dismissViewControllerAnimated:YES completion:nil];
}

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate

/// Called on the capture queue for every frame. Converts the BGRA pixel
/// buffer into a CGImage and pushes it to the two custom previews.
/// (Original declared `sampleBufferfromConnection:` — the missing separator
/// produced the wrong selector, so the delegate callback never fired.)
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Wrap the BGRA pixels in a bitmap context and snapshot them as a CGImage.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow,
                                                 colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(context);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    // UIImage retains the CGImage; rotate right because raw frames are landscape.
    UIImage *image = [UIImage imageWithCGImage:newImage
                                         scale:1.0
                                   orientation:UIImageOrientationRight];

    // UI updates must happen on the main queue. dispatch_sync mirrors the
    // original waitUntilDone:YES semantics, keeping the pixel buffer locked
    // until the frame has been consumed.
    dispatch_sync(dispatch_get_main_queue(), ^{
        self.customLayer.contents = (__bridge id)newImage;  // layer retains contents
        self.imageView.image = image;
    });

    CGImageRelease(newImage);  // balance CGBitmapContextCreateImage's +1
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}

@end
使用Swift实现
import UIKit
import AVFoundation  // was missing — AVCapture* types would not compile

/// Shows a live camera feed three ways at once: a full-screen CALayer fed
/// raw CGImages, a small UIImageView fed converted UIImages, and a small
/// system AVCaptureVideoPreviewLayer. (Swift 2 era API, matching the file.)
class MyAVController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    var captureSession: AVCaptureSession!
    var customLayer: CALayer!
    var imageView: UIImageView!
    var previewLayer: AVCaptureVideoPreviewLayer!

    override func viewDidLoad() {
        super.viewDidLoad()  // original never called super

        // Camera input — bail out cleanly on the Simulator / access failure
        // instead of force-feeding an optional into addInput.
        let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
        guard let device = devices.first as? AVCaptureDevice else { return }
        guard let captureInput = try? AVCaptureDeviceInput(device: device) else { return }

        // Data output delivering 32BGRA frames to a background serial queue.
        let captureOutput = AVCaptureVideoDataOutput()
        captureOutput.alwaysDiscardsLateVideoFrames = true  // drop frames, don't queue
        let queue = dispatch_queue_create("cameraQueue", nil)
        captureOutput.setSampleBufferDelegate(self, queue: queue)
        let key = kCVPixelBufferPixelFormatTypeKey as NSString
        let value = NSNumber(unsignedInt: kCVPixelFormatType_32BGRA)
        captureOutput.videoSettings = [key: value]

        captureSession = AVCaptureSession()
        captureSession.addInput(captureInput)
        captureSession.addOutput(captureOutput)

        // Full-screen layer fed raw CGImages; rotated 90° because raw frames
        // arrive in landscape orientation.
        customLayer = CALayer()
        var frame = self.view.bounds
        frame.origin.y = 64                 // leave room for status bar + nav area
        frame.size.height -= 64
        customLayer.frame = frame
        customLayer.transform = CATransform3DRotate(CATransform3DIdentity, CGFloat(M_PI) / 2.0, 0, 0, 1)
        customLayer.contentsGravity = kCAGravityResizeAspectFill
        self.view.layer.addSublayer(customLayer)

        // Small image view fed converted UIImages.
        imageView = UIImageView()
        imageView.frame = CGRectMake(0, 64, 100, 100)
        self.view.addSubview(imageView)

        // System preview layer for comparison.
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = CGRectMake(100, 64, 100, 100)
        previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
        self.view.layer.addSublayer(previewLayer)

        // Dismiss button.
        let btn = UIButton()
        btn.setTitle("Back", forState: .Normal)
        btn.setTitleColor(UIColor.redColor(), forState: .Normal)
        btn.sizeToFit()
        frame = btn.frame
        frame.origin.y = 25
        btn.frame = frame
        self.view.addSubview(btn)
        btn.addTarget(self, action: "back:", forControlEvents: .TouchUpInside)

        // Start last, once the pipeline is fully configured.
        captureSession.startRunning()
    }

    /// Called on the capture queue for every frame. Converts the BGRA pixel
    /// buffer into a CGImage and pushes it to the two custom previews.
    func captureOutput(captureOutput: AVCaptureOutput!,
                       didOutputSampleBuffer sampleBuffer: CMSampleBuffer!,
                       fromConnection connection: AVCaptureConnection!) {
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        CVPixelBufferLockBaseAddress(imageBuffer, 0)
        defer { CVPixelBufferUnlockBaseAddress(imageBuffer, 0) }

        let baseAddress = CVPixelBufferGetBaseAddress(imageBuffer)
        let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer)
        let width = CVPixelBufferGetWidth(imageBuffer)
        let height = CVPixelBufferGetHeight(imageBuffer)

        // Wrap the BGRA pixels in a bitmap context and snapshot them as a CGImage.
        let colorSpace = CGColorSpaceCreateDeviceRGB()
        let bitmapInfo = CGImageAlphaInfo.PremultipliedFirst.rawValue | CGBitmapInfo.ByteOrder32Little.rawValue
        let context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow,
                                            colorSpace, bitmapInfo)
        guard let newImage = CGBitmapContextCreateImage(context) else { return }

        // UIImage retains the CGImage; rotate right because raw frames are landscape.
        let image = UIImage(CGImage: newImage, scale: 1.0, orientation: .Right)

        // UI updates on the main queue; dispatch_sync keeps the pixel buffer
        // locked until the frame is consumed (replaces the fragile
        // string-selector performSelectorOnMainThread calls).
        dispatch_sync(dispatch_get_main_queue()) {
            self.customLayer.contents = newImage
            self.imageView.image = image
        }
    }

    func back(sender: AnyObject) {
        self.dismissViewControllerAnimated(true, completion: nil)
    }

    override func viewWillDisappear(animated: Bool) {
        super.viewWillDisappear(animated)
        captureSession.stopRunning()
    }
}
iOS 获取照相机实时预览图片相关推荐
- 关于input file img实时预览获取文件路径的问题
要做图片上传嘛,肯定要做实时预览.贴代码算了: <div class="btn"> <input type="button" name=&qu ...
- 图片上传实时预览效果
一. 创建一个文件上传的input框 id为doc /这是图片上传的input框 <input type='file' id='doc' name='pic' style='width:60px ...
- 安卓IOS客户端调试webview页面的方法,支持实时预览
调试方式 手机模拟器调试 真实手机调试,安卓和IOS都可以 注意事项 混合开发的时候经常要用到调试功能,调试webview方法有很多种: 直接让客户端把地址修改成本机ip地址,客户端重新出调试包 通过 ...
- 最好用的设计稿实时预览工具【Sketch、Android App、iOS App】
做ui设计的朋友对于效果图的实时预览需求非常高,在这里推荐两款软件,首先说一下MAC专属: 1.Sketch Mirror Sketch Mirror 在IOS商店可以搜索直接下载,与MAC 下 Sk ...
- wx图片的相机相册获取,预览图片,图片保存,图片的信息
wxml <view class="title">图片管理</view> <view class="demo-box"> & ...
- 微信页面通过LocalID预览图片,getlocallmgdata
微信 使用chooseImage api返回的localld以如:"img src=wxLocalResource://imageid987654321123456789"的方式 ...
- 海康威视摄像机Java SDK拉流(二)开启关闭实时预览
上一篇:海康威视Java SDK拉流(一)初始化SDK 本篇介绍海康威视摄像机通过SDK开启关闭实时预览接口 下篇介绍实时预览的回调函数及解码库 测试环境: 系统:Centos 7 SDK:设备网络S ...
- EasyRTMP:RTMP推流海康威视实时预览回调PS流用EasyRTMP向RTMP服务器推流中视频数据处理的代码
在上一篇方案<EasyRTMP结合海康HCNetSDK获取海康摄像机H.264实时流并转化成为RTMP直播推流(附源码)>中我们介绍了将海康安防摄像机进行互联网直播的整体方案流程,其中有一 ...
- oracle如何上传图片,js实现上传图片之上传前预览图片
上传图片对图片进行一下预览,可以了解图片上传后大概会是什么样子,此功能用js实现,然后在fileupload控件的change事件中调用,这样当用fileupload选择完图片以后,图片就会自动显示出 ...
最新文章
- 将EditText的光标定位到字符的最后面
- mysql-proxy完成mysql读写分离
- 单/双中括号与测试条件
- 【IOS下载】Cisco IOS下载
- 为什么程序员做测试其实是有优势的?这是我听过最....的话
- cvs 文件如何解析?
- 如何解决 Nginx 端口映射到外网后访问地址端口丢失的问题
- 你真的懂病毒式营销吗
- c语言:编辑一个有趣的死循环程序并对其修改,仅仅是一个“=”号的差别
- 【离散数学】图论 第七章(3) 图的矩阵表示(邻接矩阵、可达矩阵、传递闭包求解算法)
- com.android.yf.idp,QQ轻聊版-com.tencent.qqlite_v3.3.0_apkpure.apk
- OA无纸化办公系统对公司管理的作用
- 电视机顶盒搜台原理和方法简析
- 计算机专业应届生简历表格,计算机专业应届生个人简历模板
- Spring Aop 5种通知写法及参数JoinPoint详解
- 15个国外最好的电子商务建站程序
- Tensorflow小白实战系列
- 《windows 程序设计》中的翻译错误----学习笔记
- 解决escript: exception error: undefined function rabbitmqctl_escript:main/1问题以及如何安装Erlang和RabbitMq
- Live预告 | 松鼠AI首席科学家:在教育这个超千亿市场中,AI究竟扮演了什么样的角色?... 1