There are plenty of custom-camera examples online; this is just a small demo I threw together, for reference only.
It uses the following frameworks:
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>

Before using it, add the relevant privacy keys to Info.plist:
Privacy - Microphone Usage Description
Privacy - Photo Library Usage Description
Privacy - Camera Usage Description
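If any of these keys is missing, the app will crash the first time the corresponding hardware is accessed. It can also be worth checking the authorization status in code before presenting the camera. Here is a minimal sketch using AVCaptureDevice; the presentCamera call is a placeholder for however you show the controller below:

// Minimal runtime permission check before showing the camera (sketch)
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL videoGranted) {
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeAudio completionHandler:^(BOOL audioGranted) {
        dispatch_async(dispatch_get_main_queue(), ^{
            if (videoGranted && audioGranted) {
                // both camera and microphone are available
                // [self presentCamera]; // placeholder for presenting HVideoViewController
            } else {
                NSLog(@"Camera or microphone access was denied");
            }
        });
    }];
}];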

I modeled the demo on WeChat: tap to take a photo, long-press to record a video, and play the video back immediately after recording finishes. A simple player is wrapped up for that:
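Only the implementation file is listed below; the header isn't shown in the post. Based on how the class is used, HAVPlayer.h presumably looks roughly like this (an inferred sketch, not the original file):

// HAVPlayer.h (inferred sketch)
#import <UIKit/UIKit.h>

@interface HAVPlayer : UIView

// URL of the video to play; setting it switches to the new item and starts playback
@property (nonatomic, strong) NSURL *videoUrl;

- (instancetype)initWithFrame:(CGRect)frame withShowInView:(UIView *)bgView url:(NSURL *)url;
- (void)stopPlayer;

@end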

HAVPlayer.m:

#import "HAVPlayer.h"
#import <AVFoundation/AVFoundation.h>@interface HAVPlayer ()@property (nonatomic,strong) AVPlayer *player;//播放器对象@end@implementation HAVPlayer/*
// Only override drawRect: if you perform custom drawing.
// An empty implementation adversely affects performance during animation.
- (void)drawRect:(CGRect)rect {// Drawing code
}
*/- (instancetype)initWithFrame:(CGRect)frame withShowInView:(UIView *)bgView url:(NSURL *)url {if (self = [self initWithFrame:frame]) {//创建播放器层AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:self.player];playerLayer.frame = self.bounds;[self.layer addSublayer:playerLayer];if (url) {self.videoUrl = url;}[bgView addSubview:self];}return self;
}- (void)dealloc {[self removeAvPlayerNtf];[self stopPlayer];self.player = nil;
}- (AVPlayer *)player {if (!_player) {_player = [AVPlayer playerWithPlayerItem:[self getAVPlayerItem]];[self addAVPlayerNtf:_player.currentItem];}return _player;
}- (AVPlayerItem *)getAVPlayerItem {AVPlayerItem *playerItem=[AVPlayerItem playerItemWithURL:self.videoUrl];return playerItem;
}- (void)setVideoUrl:(NSURL *)videoUrl {_videoUrl = videoUrl;[self removeAvPlayerNtf];[self nextPlayer];
}- (void)nextPlayer {[self.player seekToTime:CMTimeMakeWithSeconds(0, _player.currentItem.duration.timescale)];[self.player replaceCurrentItemWithPlayerItem:[self getAVPlayerItem]];[self addAVPlayerNtf:self.player.currentItem];if (self.player.rate == 0) {[self.player play];}
}- (void) addAVPlayerNtf:(AVPlayerItem *)playerItem {//监控状态属性[playerItem addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nil];//监控网络加载情况属性[playerItem addObserver:self forKeyPath:@"loadedTimeRanges" options:NSKeyValueObservingOptionNew context:nil];[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(playbackFinished:) name:AVPlayerItemDidPlayToEndTimeNotification object:self.player.currentItem];
}- (void)removeAvPlayerNtf {AVPlayerItem *playerItem = self.player.currentItem;[playerItem removeObserver:self forKeyPath:@"status"];[playerItem removeObserver:self forKeyPath:@"loadedTimeRanges"];[[NSNotificationCenter defaultCenter] removeObserver:self];
}- (void)stopPlayer {if (self.player.rate == 1) {[self.player pause];//如果在播放状态就停止}
}/***  通过KVO监控播放器状态**  @param keyPath 监控属性*  @param object  监视器*  @param change  状态改变*  @param context 上下文*/
-(void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context{AVPlayerItem *playerItem = object;if ([keyPath isEqualToString:@"status"]) {AVPlayerStatus status= [[change objectForKey:@"new"] intValue];if(status==AVPlayerStatusReadyToPlay){NSLog(@"正在播放...,视频总长度:%.2f",CMTimeGetSeconds(playerItem.duration));}}else if([keyPath isEqualToString:@"loadedTimeRanges"]){NSArray *array=playerItem.loadedTimeRanges;CMTimeRange timeRange = [array.firstObject CMTimeRangeValue];//本次缓冲时间范围float startSeconds = CMTimeGetSeconds(timeRange.start);float durationSeconds = CMTimeGetSeconds(timeRange.duration);NSTimeInterval totalBuffer = startSeconds + durationSeconds;//缓冲总长度NSLog(@"共缓冲:%.2f",totalBuffer);}
}- (void)playbackFinished:(NSNotification *)ntf {Plog(@"视频播放完成");[self.player seekToTime:CMTimeMake(0, 1)];[self.player play];
}@end

WeChat also draws a circular arc timer around the capture button while you long-press. The following view handles that:
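As with the player, only the implementation is listed; judging from it, the header presumably declares something like this (inferred sketch):

// HProgressView.h (inferred sketch)
#import <UIKit/UIKit.h>

@interface HProgressView : UIView

// Maximum recording time in seconds; setting it resets and starts the arc animation
@property (nonatomic, assign) NSInteger timeMax;

- (void)clearProgress;

@end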

HProgressView.m:

#import "HProgressView.h"@interface HProgressView ()/***  进度值0-1.0之间*/
@property (nonatomic,assign)CGFloat progressValue;@property (nonatomic, assign) CGFloat currentTime;@end@implementation HProgressView// Only override drawRect: if you perform custom drawing.
// An empty implementation adversely affects performance during animation.
- (void)drawRect:(CGRect)rect {// Drawing codeCGContextRef ctx = UIGraphicsGetCurrentContext();//获取上下文Plog(@"width = %f",self.frame.size.width);CGPoint center = CGPointMake(self.frame.size.width/2.0, self.frame.size.width/2.0);  //设置圆心位置CGFloat radius = self.frame.size.width/2.0-5;  //设置半径CGFloat startA = - M_PI_2;  //圆起点位置CGFloat endA = -M_PI_2 + M_PI * 2 * _progressValue;  //圆终点位置UIBezierPath *path = [UIBezierPath bezierPathWithArcCenter:center radius:radius startAngle:startA endAngle:endA clockwise:YES];CGContextSetLineWidth(ctx, 10); //设置线条宽度[[UIColor whiteColor] setStroke]; //设置描边颜色CGContextAddPath(ctx, path.CGPath); //把路径添加到上下文CGContextStrokePath(ctx);  //渲染
}- (void)setTimeMax:(NSInteger)timeMax {_timeMax = timeMax;self.currentTime = 0;self.progressValue = 0;[self setNeedsDisplay];self.hidden = NO;[self performSelector:@selector(startProgress) withObject:nil afterDelay:0.1];
}- (void)clearProgress {_currentTime = _timeMax;self.hidden = YES;
}- (void)startProgress {_currentTime += 0.1;if (_timeMax > _currentTime) {_progressValue = _currentTime/_timeMax;Plog(@"progress = %f",_progressValue);[self setNeedsDisplay];[self performSelector:@selector(startProgress) withObject:nil afterDelay:0.1];}if (_timeMax <= _currentTime) {[self clearProgress];}
}@end

Next up is the camera view controller itself. Since this was thrown together quickly it uses a xib, so don't use it directly; the .m code is below.
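The matching header isn't shown in the post either. From the implementation and the usage snippet at the end, HVideoViewController.h presumably declares roughly the following (an inferred sketch, not the original file):

// HVideoViewController.h (inferred sketch)
#import <UIKit/UIKit.h>

@interface HVideoViewController : UIViewController

// Maximum recording time in seconds (defaults to 60 when left at 0)
@property (assign, nonatomic) NSInteger HSeconds;

// Called with an NSURL (saved video) or a UIImage (photo) when the user confirms
@property (copy, nonatomic) void (^takeBlock)(id item);

@end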

#import "HVideoViewController.h"
#import <AVFoundation/AVFoundation.h>
#import "HAVPlayer.h"
#import "HProgressView.h"
#import <Foundation/Foundation.h>
#import <AssetsLibrary/AssetsLibrary.h>

typedef void(^PropertyChangeBlock)(AVCaptureDevice *captureDevice);

@interface HVideoViewController ()<AVCaptureFileOutputRecordingDelegate>

// "Tap for photo, hold for video" tip label
@property (strong, nonatomic) IBOutlet UILabel *labelTipTitle;
// Movie (video) file output
@property (strong, nonatomic) AVCaptureMovieFileOutput *captureMovieFileOutput;
// Still image output
//@property (strong, nonatomic) AVCaptureStillImageOutput *captureStillImageOutput;
// Input that pulls data from the AVCaptureDevice
@property (strong, nonatomic) AVCaptureDeviceInput *captureDeviceInput;
// Background task identifiers
@property (assign, nonatomic) UIBackgroundTaskIdentifier backgroundTaskIdentifier;
@property (assign, nonatomic) UIBackgroundTaskIdentifier lastBackgroundTaskIdentifier;
// Focus cursor
@property (weak, nonatomic) IBOutlet UIImageView *focusCursor;
// Session that passes data between inputs and outputs
@property (nonatomic) AVCaptureSession *session;
// Preview layer that shows the live camera feed
@property (nonatomic) AVCaptureVideoPreviewLayer *previewLayer;
@property (strong, nonatomic) IBOutlet UIButton *btnBack;
// Re-record
@property (strong, nonatomic) IBOutlet UIButton *btnAfresh;
// Confirm
@property (strong, nonatomic) IBOutlet UIButton *btnEnsure;
// Switch camera
@property (strong, nonatomic) IBOutlet UIButton *btnCamera;
@property (strong, nonatomic) IBOutlet UIImageView *bgView;
// Remaining recording time, 60 seconds max by default
@property (assign, nonatomic) NSInteger seconds;
// URL of the recorded video to save
@property (strong, nonatomic) NSURL *saveVideoUrl;
// Whether a focus animation is in progress
@property (assign, nonatomic) BOOL isFocus;
@property (strong, nonatomic) IBOutlet NSLayoutConstraint *afreshCenterX;
@property (strong, nonatomic) IBOutlet NSLayoutConstraint *ensureCenterX;
@property (strong, nonatomic) IBOutlet NSLayoutConstraint *backCenterX;
// Video player
@property (strong, nonatomic) HAVPlayer *player;
@property (strong, nonatomic) IBOutlet HProgressView *progressView;
// YES means video recording, NO means taking a photo
@property (assign, nonatomic) BOOL isVideo;
@property (strong, nonatomic) UIImage *takeImage;
@property (strong, nonatomic) UIImageView *takeImageView;
@property (strong, nonatomic) IBOutlet UIImageView *imgRecord;

@end

// A press held longer than this (in seconds) counts as video; otherwise it is a photo
#define TimeMax 1

@implementation HVideoViewController

- (void)dealloc {
    [self removeNotification];
}

- (void)viewDidLoad {
    [super viewDidLoad];
    UIImage *image = [UIImage imageNamed:@"sc_btn_take.png"];
    self.backCenterX.constant = -(SCREEN_WIDTH / 2 / 2) - image.size.width / 2 / 2;
    self.progressView.layer.cornerRadius = self.progressView.frame.size.width / 2;
    if (self.HSeconds == 0) {
        self.HSeconds = 60;
    }
    [self performSelector:@selector(hiddenTipsLabel) withObject:nil afterDelay:4];
}

- (void)hiddenTipsLabel {
    self.labelTipTitle.hidden = YES;
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
}

- (void)viewWillAppear:(BOOL)animated {
    [super viewWillAppear:animated];
    [[UIApplication sharedApplication] setStatusBarHidden:YES];
    [self customCamera];
    [self.session startRunning];
}

- (void)viewDidAppear:(BOOL)animated {
    [super viewDidAppear:animated];
}

- (void)viewDidDisappear:(BOOL)animated {
    [super viewDidDisappear:animated];
    [self.session stopRunning];
}

- (void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];
    [[UIApplication sharedApplication] setStatusBarHidden:NO];
}

- (void)customCamera {
    // Create the session that connects inputs and outputs
    self.session = [[AVCaptureSession alloc] init];
    // Use the highest resolution the device supports
    if ([self.session canSetSessionPreset:AVCaptureSessionPresetHigh]) {
        self.session.sessionPreset = AVCaptureSessionPresetHigh;
    }
    // Get the back camera
    AVCaptureDevice *captureDevice = [self getCameraDeviceWithPosition:AVCaptureDevicePositionBack];
    // Get an audio capture device
    AVCaptureDevice *audioCaptureDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
    // Create the video input
    NSError *error = nil;
    self.captureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:captureDevice error:&error];
    if (error) {
        Plog(@"Failed to create the video input: %@", error.localizedDescription);
        return;
    }
    // Create the audio input
    error = nil;
    AVCaptureDeviceInput *audioCaptureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioCaptureDevice error:&error];
    if (error) {
        NSLog(@"Failed to create the audio input: %@", error.localizedDescription);
        return;
    }
    // Movie file output
    self.captureMovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
    // Add the inputs to the session
    if ([self.session canAddInput:self.captureDeviceInput]) {
        [self.session addInput:self.captureDeviceInput];
        [self.session addInput:audioCaptureDeviceInput];
        // Enable video stabilization
        AVCaptureConnection *connection = [self.captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
        if ([connection isVideoStabilizationSupported]) {
            connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeCinematic;
        }
    }
    // Add the output to the session
    if ([self.session canAddOutput:self.captureMovieFileOutput]) {
        [self.session addOutput:self.captureMovieFileOutput];
    }
    // Create the preview layer that shows the live camera feed
    self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
    self.previewLayer.frame = self.view.bounds;
    self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; // fill mode
    [self.bgView.layer addSublayer:self.previewLayer];
    [self addNotificationToCaptureDevice:captureDevice];
    [self addGenstureRecognizer];
}

- (IBAction)onCancelAction:(UIButton *)sender {
    [self dismissViewControllerAnimated:YES completion:^{
        [Utility hideProgressDialog];
    }];
}

- (void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
    if ([[touches anyObject] view] == self.imgRecord) {
        Plog(@"Start recording");
        // Get the video connection from the movie file output
        AVCaptureConnection *connection = [self.captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
        if (![self.captureMovieFileOutput isRecording]) {
            // Start a background task if multitasking is supported
            if ([[UIDevice currentDevice] isMultitaskingSupported]) {
                self.backgroundTaskIdentifier = [[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:nil];
            }
            if (self.saveVideoUrl) {
                [[NSFileManager defaultManager] removeItemAtURL:self.saveVideoUrl error:nil];
            }
            // Keep the recording orientation in sync with the preview layer
            connection.videoOrientation = [self.previewLayer connection].videoOrientation;
            NSString *outputFielPath = [NSTemporaryDirectory() stringByAppendingString:@"myMovie.mov"];
            NSLog(@"save path is: %@", outputFielPath);
            NSURL *fileUrl = [NSURL fileURLWithPath:outputFielPath];
            NSLog(@"fileUrl: %@", fileUrl);
            [self.captureMovieFileOutput startRecordingToOutputFileURL:fileUrl recordingDelegate:self];
        } else {
            [self.captureMovieFileOutput stopRecording];
        }
    }
}

- (void)touchesEnded:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
    if ([[touches anyObject] view] == self.imgRecord) {
        Plog(@"Touch ended");
        if (!self.isVideo) {
            [self performSelector:@selector(endRecord) withObject:nil afterDelay:0.3];
        } else {
            [self endRecord];
        }
    }
}

- (void)endRecord {
    [self.captureMovieFileOutput stopRecording]; // stop recording
}

- (IBAction)onAfreshAction:(UIButton *)sender {
    Plog(@"Re-record");
    [self recoverLayout];
}

- (IBAction)onEnsureAction:(UIButton *)sender {
    Plog(@"Confirm: save or send the result here");
    if (self.saveVideoUrl) {
        WS(weakSelf)
        [Utility showProgressDialogText:@"Processing video..."];
        ALAssetsLibrary *assetsLibrary = [[ALAssetsLibrary alloc] init];
        [assetsLibrary writeVideoAtPathToSavedPhotosAlbum:self.saveVideoUrl completionBlock:^(NSURL *assetURL, NSError *error) {
            Plog(@"outputUrl: %@", weakSelf.saveVideoUrl);
            [[NSFileManager defaultManager] removeItemAtURL:weakSelf.saveVideoUrl error:nil];
            if (weakSelf.lastBackgroundTaskIdentifier != UIBackgroundTaskInvalid) {
                [[UIApplication sharedApplication] endBackgroundTask:weakSelf.lastBackgroundTaskIdentifier];
            }
            if (error) {
                Plog(@"Error saving the video to the photo album: %@", error.localizedDescription);
                [Utility showAllTextDialog:KAppDelegate.window Text:@"Failed to save the video to the album"];
            } else {
                if (weakSelf.takeBlock) {
                    weakSelf.takeBlock(assetURL);
                }
                Plog(@"Video saved to the album.");
                [weakSelf onCancelAction:nil];
            }
        }];
    } else {
        // Photo
        UIImageWriteToSavedPhotosAlbum(self.takeImage, self, nil, nil);
        if (self.takeBlock) {
            self.takeBlock(self.takeImage);
        }
        [self onCancelAction:nil];
    }
}

// Switch between the front and back cameras
- (IBAction)onCameraAction:(UIButton *)sender {
    Plog(@"Switch camera");
    AVCaptureDevice *currentDevice = [self.captureDeviceInput device];
    AVCaptureDevicePosition currentPosition = [currentDevice position];
    [self removeNotificationFromCaptureDevice:currentDevice];
    AVCaptureDevice *toChangeDevice;
    AVCaptureDevicePosition toChangePosition = AVCaptureDevicePositionFront; // front
    if (currentPosition == AVCaptureDevicePositionUnspecified || currentPosition == AVCaptureDevicePositionFront) {
        toChangePosition = AVCaptureDevicePositionBack; // back
    }
    toChangeDevice = [self getCameraDeviceWithPosition:toChangePosition];
    [self addNotificationToCaptureDevice:toChangeDevice];
    // Create the new device input
    AVCaptureDeviceInput *toChangeDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:toChangeDevice error:nil];
    // Wrap session configuration changes in begin/commitConfiguration
    [self.session beginConfiguration];
    // Remove the old input
    [self.session removeInput:self.captureDeviceInput];
    // Add the new input
    if ([self.session canAddInput:toChangeDeviceInput]) {
        [self.session addInput:toChangeDeviceInput];
        self.captureDeviceInput = toChangeDeviceInput;
    }
    // Commit the configuration
    [self.session commitConfiguration];
}

- (void)onStartTranscribe:(NSURL *)fileURL {
    if ([self.captureMovieFileOutput isRecording]) {
        --self.seconds;
        if (self.seconds > 0) {
            if (self.HSeconds - self.seconds >= TimeMax && !self.isVideo) {
                self.isVideo = YES; // held longer than TimeMax, so this is a video recording
                self.progressView.timeMax = self.seconds;
            }
            [self performSelector:@selector(onStartTranscribe:) withObject:fileURL afterDelay:1.0];
        } else {
            if ([self.captureMovieFileOutput isRecording]) {
                [self.captureMovieFileOutput stopRecording];
            }
        }
    }
}

#pragma mark - AVCaptureFileOutputRecordingDelegate

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections {
    Plog(@"Recording started...");
    self.seconds = self.HSeconds;
    [self performSelector:@selector(onStartTranscribe:) withObject:fileURL afterDelay:1.0];
}

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
    Plog(@"Recording finished.");
    [self changeLayout];
    if (self.isVideo) {
        self.saveVideoUrl = outputFileURL;
        if (!self.player) {
            self.player = [[HAVPlayer alloc] initWithFrame:self.bgView.bounds withShowInView:self.bgView url:outputFileURL];
        } else {
            if (outputFileURL) {
                self.player.videoUrl = outputFileURL;
                self.player.hidden = NO;
            }
        }
    } else {
        // Photo: grab a frame from the short clip instead
        self.saveVideoUrl = nil;
        [self videoHandlePhoto:outputFileURL];
    }
}

- (void)videoHandlePhoto:(NSURL *)url {
    AVURLAsset *urlSet = [AVURLAsset assetWithURL:url];
    AVAssetImageGenerator *imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:urlSet];
    imageGenerator.appliesPreferredTrackTransform = YES; // keep the correct orientation
    NSError *error = nil;
    // CMTime describes a point in the movie: the first argument is the value, the second the timescale (frames per second)
    CMTime time = CMTimeMake(0, 30);
    CMTime actucalTime; // the time the thumbnail was actually generated at
    CGImageRef cgImage = [imageGenerator copyCGImageAtTime:time actualTime:&actucalTime error:&error];
    if (error) {
        Plog(@"Failed to grab a frame from the video: %@", error.localizedDescription);
    }
    CMTimeShow(actucalTime);
    UIImage *image = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);
    if (image) {
        Plog(@"Frame grabbed successfully");
    } else {
        Plog(@"Failed to grab a frame");
    }
    self.takeImage = image;
    [[NSFileManager defaultManager] removeItemAtURL:url error:nil];
    if (!self.takeImageView) {
        self.takeImageView = [[UIImageView alloc] initWithFrame:self.view.frame];
        [self.bgView addSubview:self.takeImageView];
    }
    self.takeImageView.hidden = NO;
    self.takeImageView.image = self.takeImage;
}

#pragma mark - Notifications

// Register notifications
- (void)setupObservers {
    NSNotificationCenter *notification = [NSNotificationCenter defaultCenter];
    [notification addObserver:self selector:@selector(applicationDidEnterBackground:) name:UIApplicationWillResignActiveNotification object:[UIApplication sharedApplication]];
}

// Quit recording when the app goes to the background
- (void)applicationDidEnterBackground:(NSNotification *)notification {
    [self onCancelAction:nil];
}

/**
 *  Add notifications for the capture device
 */
- (void)addNotificationToCaptureDevice:(AVCaptureDevice *)captureDevice {
    // Note: subject area change monitoring must be enabled before observing the notification
    [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
        captureDevice.subjectAreaChangeMonitoringEnabled = YES;
    }];
    NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
    // Subject area changed
    [notificationCenter addObserver:self selector:@selector(areaChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
}

- (void)removeNotificationFromCaptureDevice:(AVCaptureDevice *)captureDevice {
    NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
    [notificationCenter removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
}

/**
 *  Remove all notifications
 */
- (void)removeNotification {
    NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
    [notificationCenter removeObserver:self];
}

- (void)addNotificationToCaptureSession:(AVCaptureSession *)captureSession {
    NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
    // Session runtime error
    [notificationCenter addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:captureSession];
}

/**
 *  Device connected
 *
 *  @param notification notification object
 */
- (void)deviceConnected:(NSNotification *)notification {
    NSLog(@"Device connected...");
}

/**
 *  Device disconnected
 *
 *  @param notification notification object
 */
- (void)deviceDisconnected:(NSNotification *)notification {
    NSLog(@"Device disconnected.");
}

/**
 *  Subject area changed
 *
 *  @param notification notification object
 */
- (void)areaChange:(NSNotification *)notification {
    NSLog(@"Subject area changed...");
}

/**
 *  Session runtime error
 *
 *  @param notification notification object
 */
- (void)sessionRuntimeError:(NSNotification *)notification {
    NSLog(@"A session runtime error occurred.");
}

/**
 *  Get the camera at the given position
 *
 *  @param position camera position
 *
 *  @return camera device
 */
- (AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition)position {
    NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *camera in cameras) {
        if ([camera position] == position) {
            return camera;
        }
    }
    return nil;
}

/**
 *  Unified helper for changing device properties
 *
 *  @param propertyChange block that applies the change
 */
- (void)changeDeviceProperty:(PropertyChangeBlock)propertyChange {
    AVCaptureDevice *captureDevice = [self.captureDeviceInput device];
    NSError *error;
    // Always call lockForConfiguration: before changing device properties and unlockForConfiguration afterwards
    if ([captureDevice lockForConfiguration:&error]) {
        // Continuous auto white balance
        if ([captureDevice isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]) {
            [captureDevice setWhiteBalanceMode:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance];
        }
        // Automatic flash based on lighting conditions
        if ([captureDevice isFlashModeSupported:AVCaptureFlashModeAuto]) {
            [captureDevice setFlashMode:AVCaptureFlashModeAuto];
        }
        propertyChange(captureDevice);
        [captureDevice unlockForConfiguration];
    } else {
        NSLog(@"Error while setting device properties: %@", error.localizedDescription);
    }
}

/**
 *  Set the flash mode
 *
 *  @param flashMode flash mode
 */
- (void)setFlashMode:(AVCaptureFlashMode)flashMode {
    [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
        if ([captureDevice isFlashModeSupported:flashMode]) {
            [captureDevice setFlashMode:flashMode];
        }
    }];
}

/**
 *  Set the focus mode
 *
 *  @param focusMode focus mode
 */
- (void)setFocusMode:(AVCaptureFocusMode)focusMode {
    [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
        if ([captureDevice isFocusModeSupported:focusMode]) {
            [captureDevice setFocusMode:focusMode];
        }
    }];
}

/**
 *  Set the exposure mode
 *
 *  @param exposureMode exposure mode
 */
- (void)setExposureMode:(AVCaptureExposureMode)exposureMode {
    [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
        if ([captureDevice isExposureModeSupported:exposureMode]) {
            [captureDevice setExposureMode:exposureMode];
        }
    }];
}

/**
 *  Set the focus point
 *
 *  @param point point of interest
 */
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposureMode:(AVCaptureExposureMode)exposureMode atPoint:(CGPoint)point {
    [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
//        if ([captureDevice isFocusPointOfInterestSupported]) {
//            [captureDevice setFocusPointOfInterest:point];
//        }
//        if ([captureDevice isExposurePointOfInterestSupported]) {
//            [captureDevice setExposurePointOfInterest:point];
//        }
        if ([captureDevice isExposureModeSupported:exposureMode]) {
            [captureDevice setExposureMode:exposureMode];
        }
        if ([captureDevice isFocusModeSupported:focusMode]) {
            [captureDevice setFocusMode:focusMode];
        }
    }];
}

/**
 *  Add a tap gesture; tapping focuses at the tapped point
 */
- (void)addGenstureRecognizer {
    UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(tapScreen:)];
    [self.bgView addGestureRecognizer:tapGesture];
}

- (void)tapScreen:(UITapGestureRecognizer *)tapGesture {
    if ([self.session isRunning]) {
        CGPoint point = [tapGesture locationInView:self.bgView];
        // Convert UI coordinates to camera coordinates
        CGPoint cameraPoint = [self.previewLayer captureDevicePointOfInterestForPoint:point];
        [self setFocusCursorWithPoint:point];
        [self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposureMode:AVCaptureExposureModeContinuousAutoExposure atPoint:cameraPoint];
    }
}

/**
 *  Position the focus cursor
 *
 *  @param point cursor position
 */
- (void)setFocusCursorWithPoint:(CGPoint)point {
    if (!self.isFocus) {
        self.isFocus = YES;
        self.focusCursor.center = point;
        self.focusCursor.transform = CGAffineTransformMakeScale(1.25, 1.25);
        self.focusCursor.alpha = 1.0;
        [UIView animateWithDuration:0.5 animations:^{
            self.focusCursor.transform = CGAffineTransformIdentity;
        } completion:^(BOOL finished) {
            [self performSelector:@selector(onHiddenFocusCurSorAction) withObject:nil afterDelay:0.5];
        }];
    }
}

- (void)onHiddenFocusCurSorAction {
    self.focusCursor.alpha = 0;
    self.isFocus = NO;
}

// Called when capture finishes
- (void)changeLayout {
    self.imgRecord.hidden = YES;
    self.btnCamera.hidden = YES;
    self.btnAfresh.hidden = NO;
    self.btnEnsure.hidden = NO;
    self.btnBack.hidden = YES;
    if (self.isVideo) {
        [self.progressView clearProgress];
    }
    self.afreshCenterX.constant = -(SCREEN_WIDTH / 2 / 2);
    self.ensureCenterX.constant = SCREEN_WIDTH / 2 / 2;
    [UIView animateWithDuration:0.25 animations:^{
        [self.view layoutIfNeeded];
    }];
    self.lastBackgroundTaskIdentifier = self.backgroundTaskIdentifier;
    self.backgroundTaskIdentifier = UIBackgroundTaskInvalid;
    [self.session stopRunning];
}

// Called when re-shooting
- (void)recoverLayout {
    if (self.isVideo) {
        self.isVideo = NO;
        [self.player stopPlayer];
        self.player.hidden = YES;
    }
    [self.session startRunning];
    if (!self.takeImageView.hidden) {
        self.takeImageView.hidden = YES;
    }
//    self.saveVideoUrl = nil;
    self.afreshCenterX.constant = 0;
    self.ensureCenterX.constant = 0;
    self.imgRecord.hidden = NO;
    self.btnCamera.hidden = NO;
    self.btnAfresh.hidden = YES;
    self.btnEnsure.hidden = YES;
    self.btnBack.hidden = NO;
    [UIView animateWithDuration:0.25 animations:^{
        [self.view layoutIfNeeded];
    }];
}

@end
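The controller references a few project-level helpers that the post never defines: Plog, WS, SCREEN_WIDTH, KAppDelegate, and the Utility HUD wrapper. If you copy the code out, you'll need your own equivalents; one plausible set of definitions, purely as an assumption, looks like this:

// Assumed project helpers (not part of the original demo; adjust to your project)
#define Plog(...)       NSLog(__VA_ARGS__)                                   // debug logging macro
#define WS(weakSelf)    __weak typeof(self) weakSelf = self;                 // weak-self helper
#define SCREEN_WIDTH    ([UIScreen mainScreen].bounds.size.width)            // screen width
#define KAppDelegate    ((AppDelegate *)[UIApplication sharedApplication].delegate) // app delegate shortcut
// Utility is assumed to be a small HUD wrapper exposing showProgressDialogText:,
// hideProgressDialog, and showAllTextDialog:Text: (for example, built on MBProgressHUD).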

Using it is straightforward:


- (IBAction)onCameraAction:(UIButton *)sender {
    // Er... since this is a demo it is loaded from a xib; change it to suit your needs.
    // The demo only offers one approach, so don't drag it into a project unchanged.
    HVideoViewController *ctrl = [[NSBundle mainBundle] loadNibNamed:@"HVideoViewController" owner:nil options:nil].lastObject;
    ctrl.HSeconds = 30; // maximum recording time
    ctrl.takeBlock = ^(id item) {
        if ([item isKindOfClass:[NSURL class]]) {
            NSURL *videoURL = item; // video URL
        } else {
            // photo (UIImage)
        }
    };
    [self presentViewController:ctrl animated:YES completion:nil];
}

Here's the demo repository as well (don't touch it if you don't like it -_-):
https://github.com/hkjin/KJCamera

That's all. It's fairly simple, but I hope it helps. Thanks!
