我刚刚开始使用 Objective-C,正在尝试创建一个简单的应用程序,用来显示带有模糊效果的相机画面。我已经用 AVFoundation 框架让相机输出正常工作了。现在我想接入 Core Image 框架,但不知道该怎么做;Apple 的文档让我感到困惑,在网上搜索指南和教程也没有找到结果。在此先感谢您的帮助。
- #import "ViewController.h"
- #import <AVFoundation/AVFoundation.h>
// Private class extension: keeps the lazily-built Core Image context
// out of the public header.
@interface ViewController ()

/// Rendering context for Core Image work; created on first access
/// by the -context accessor.
@property (strong, nonatomic) CIContext *context;

@end
@implementation ViewController {
    // Instance state instead of the original file-scope globals: globals are
    // shared across every instance and outlive the controller.
    AVCaptureSession *_session;
    AVCaptureStillImageOutput *_stillImageOutput;
}

/// Lazily creates the Core Image context used for rendering filters.
- (CIContext *)context {
    if (!_context) {
        _context = [CIContext contextWithOptions:nil];
    }
    return _context;
}

- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
}

/// Builds the capture session and the live preview layer each time the view
/// is about to appear.
- (void)viewWillAppear:(BOOL)animated {
    // BUG FIX: the original never called super, which breaks UIViewController's
    // appearance bookkeeping.
    [super viewWillAppear:animated];

    _session = [[AVCaptureSession alloc] init];
    [_session setSessionPreset:AVCaptureSessionPresetPhoto];

    AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice
                                                                              error:&error];
    // BUG FIX: the original fetched `error` but never checked the result; a nil
    // input (no camera / no permission) silently produced a dead session.
    if (!deviceInput) {
        NSLog(@"Could not create camera input: %@", error);
        return;
    }
    if ([_session canAddInput:deviceInput]) {
        [_session addInput:deviceInput];
    }

    AVCaptureVideoPreviewLayer *previewLayer =
        [[AVCaptureVideoPreviewLayer alloc] initWithSession:_session];
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;

    CALayer *rootLayer = self.view.layer;
    rootLayer.masksToBounds = YES;
    previewLayer.frame = self.imageView.frame;
    previewLayer.connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
    [rootLayer insertSublayer:previewLayer atIndex:0];

    _stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    _stillImageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG};
    // Robustness: mirror the canAddInput: check used above.
    if ([_session canAddOutput:_stillImageOutput]) {
        [_session addOutput:_stillImageOutput];
    }

    [_session startRunning];
}

@end
解决方法
这里有一些可以帮助你入门的代码。它是以下链接中代码的更新版本:
https://gist.github.com/eladb/9662102
诀窍是使用AVCaptureVideoDataOutputSampleBufferDelegate.
使用此委托,您可以使用imageWithCVPixelBuffer从相机缓冲区构建CIImage.
现在虽然我想弄清楚如何减少滞后.我会尽快更新.
更新:延迟现在已经很小,对于某些滤镜效果几乎无法察觉。不幸的是,模糊似乎是最慢的效果之一。您可能需要了解一下 vImage。
- #import "ViewController.h"
- #import <CoreImage/CoreImage.h>
- #import <AVFoundation/AVFoundation.h>
// Private class extension: state for the blurred live-camera preview.
// BUG FIX: declare AVCaptureVideoDataOutputSampleBufferDelegate conformance —
// -setupCamera passes `self` to -setSampleBufferDelegate:queue:, and without
// this declaration the compiler warns and the
// -captureOutput:didOutputSampleBuffer:fromConnection: callback is not a
// declared protocol method.
@interface ViewController () <AVCaptureVideoDataOutputSampleBufferDelegate>

/// Core Image context used to render each blurred frame.
@property (strong, nonatomic) CIContext *coreImageContext;
/// Capture session driving the camera.
@property (strong, nonatomic) AVCaptureSession *cameraSession;
/// Frame-by-frame video output feeding the delegate callback.
@property (strong, nonatomic) AVCaptureVideoDataOutput *videoOutput;
/// View whose layer displays the blurred frames.
@property (strong, nonatomic) UIView *blurCameraView;
/// Gaussian-blur filter, configured once and reused for every frame.
@property (strong, nonatomic) CIFilter *filter;
/// YES once -openCamera has started the session (guards re-entry).
@property BOOL cameraOpen;

@end
- @implementation ViewController
/// Creates the full-screen preview view, configures the Gaussian-blur filter,
/// then sets up and starts the camera.
- (void)viewDidLoad {
    [super viewDidLoad];

    self.blurCameraView = [[UIView alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
    [self.view addSubview:self.blurCameraView];

    // Set up the filter once; the capture callback reuses it for every frame.
    self.filter = [CIFilter filterWithName:@"CIGaussianBlur"];
    [self.filter setDefaults];
    // Use the Core Image framework constant rather than the raw @"inputRadius"
    // string (same key, but typo-proof and documented).
    [self.filter setValue:@(3.0f) forKey:kCIInputRadiusKey];

    [self setupCamera];
    [self openCamera];
    // Do any additional setup after loading the view, typically from a nib.
}
/// Standard UIViewController memory-pressure hook; nothing extra to release
/// here (the Xcode template body is kept as-is).
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
/// Configures the capture pipeline: a software Core Image context, a
/// low-quality session preset (keeps the per-frame blur cheap), the default
/// camera as input, and a video-data output delivering frames to this
/// controller on a private serial queue.
- (void)setupCamera {
    // Software renderer is what the original answer used; kept for identical
    // output, though the GPU-backed context is usually faster.
    self.coreImageContext =
        [CIContext contextWithOptions:@{kCIContextUseSoftwareRenderer : @(YES)}];

    // session
    self.cameraSession = [[AVCaptureSession alloc] init];
    // BUG FIX: the original called -commitConfiguration with no matching
    // -beginConfiguration, and did so before any inputs/outputs were added.
    // Batch the whole setup between begin/commit instead.
    [self.cameraSession beginConfiguration];
    [self.cameraSession setSessionPreset:AVCaptureSessionPresetLow];

    // input
    AVCaptureDevice *shootingCamera =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    AVCaptureDeviceInput *shootingDevice =
        [AVCaptureDeviceInput deviceInputWithDevice:shootingCamera error:&error];
    // BUG FIX: the original passed error:NULL and never checked the result.
    if (shootingDevice && [self.cameraSession canAddInput:shootingDevice]) {
        [self.cameraSession addInput:shootingDevice];
    } else {
        NSLog(@"Could not add camera input: %@", error);
    }

    // video output
    self.videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    self.videoOutput.alwaysDiscardsLateVideoFrames = YES;
    // BUG FIX: -setSampleBufferDelegate:queue: requires a *serial* queue. The
    // global high-priority queue is concurrent, so frames could be processed
    // out of order and race on self.filter.
    dispatch_queue_t frameQueue =
        dispatch_queue_create("com.example.blurcamera.frames", DISPATCH_QUEUE_SERIAL);
    [self.videoOutput setSampleBufferDelegate:self queue:frameQueue];
    if ([self.cameraSession canAddOutput:self.videoOutput]) {
        [self.cameraSession addOutput:self.videoOutput];
    }

    // The output's connection exists only after -addOutput:, so fix the
    // orientation here.
    AVCaptureConnection *connection = self.videoOutput.connections.firstObject;
    connection.videoOrientation = AVCaptureVideoOrientationPortrait;

    [self.cameraSession commitConfiguration];

    self.cameraOpen = NO;
}
/// AVCaptureVideoDataOutputSampleBufferDelegate callback: blurs each camera
/// frame with the preconfigured CIFilter and pushes the rendered result into
/// the preview layer on the main thread.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    // Get the Core Video pixel buffer backing this frame.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (!imageBuffer) {
        return;  // BUG FIX: not a video frame — the original would have crashed downstream.
    }

    // Wrap the buffer in a CIImage (no pixel copy) and feed it to the blur.
    CIImage *frame = [CIImage imageWithCVPixelBuffer:imageBuffer];
    // Framework constant rather than the raw @"inputImage" string.
    [self.filter setValue:frame forKey:kCIInputImageKey];

    CIImage *blurred = self.filter.outputImage;
    if (!blurred) {
        return;  // BUG FIX: guard against a nil output before rendering.
    }

    // Render on this background queue; only the layer update touches UIKit.
    CGImageRef blurredImage = [self.coreImageContext createCGImage:blurred
                                                          fromRect:frame.extent];
    dispatch_async(dispatch_get_main_queue(), ^{
        self.blurCameraView.layer.contents = (__bridge id)blurredImage;
        CGImageRelease(blurredImage);
    });
}
/// Starts the capture session (at most once) and fades the blurred preview in.
- (void)openCamera {
    if (self.cameraOpen) {
        return;
    }
    // Set the guard before the asynchronous start so a rapid second call
    // cannot start the session twice.
    self.cameraOpen = YES;

    self.blurCameraView.alpha = 0.0f;
    // BUG FIX: -startRunning is a blocking call and Apple recommends invoking
    // it off the main thread; the original froze the UI while the camera
    // spun up.
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        [self.cameraSession startRunning];
    });

    [self.view layoutIfNeeded];
    [UIView animateWithDuration:3.0f animations:^{
        self.blurCameraView.alpha = 1.0f;
    }];
}