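// ViewController: renders the live camera feed through Core Image filters.
// Two variants of the capture/filter pipeline appear below: a heavier one that
// builds filters per frame at full resolution, and a leaner one that reuses a
// single blur filter on low-resolution frames.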
- (void)setupCamera
{
    self.coreImageContext = [CIContext contextWithOptions:nil];

    // session
    self.cameraSession = [[AVCaptureSession alloc] init];
    [self.cameraSession setSessionPreset:AVCaptureSessionPresetPhoto];
    [self.cameraSession commitConfiguration];

    // input
    AVCaptureDevice *shootingCamera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *shootingDevice = [AVCaptureDeviceInput deviceInputWithDevice:shootingCamera error:NULL];
    if ([self.cameraSession canAddInput:shootingDevice]) {
        [self.cameraSession addInput:shootingDevice];
    }

    // video output
    self.videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    // Drop frames that arrive while the delegate is still busy, so slow
    // filtering never builds up a backlog of stale frames.
    self.videoOutput.alwaysDiscardsLateVideoFrames = YES;
    [self.videoOutput setSampleBufferDelegate:self queue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)];
    if ([self.cameraSession canAddOutput:self.videoOutput]) {
        [self.cameraSession addOutput:self.videoOutput];
    }
    if (self.videoOutput.connections.count > 0) {
        AVCaptureConnection *connection = self.videoOutput.connections[0];
        connection.videoOrientation = AVCaptureVideoOrientationPortrait;
    }

    self.cameraOpen = NO;
}
#import " ViewController.h"
#import < CoreImage/CoreImage.h>
#import < AVFoundation/AVFoundation.h>
@interface ViewController () <AVCaptureVideoDataOutputSampleBufferDelegate>

@property (strong, nonatomic) CIContext *coreImageContext;
@property (strong, nonatomic) AVCaptureSession *cameraSession;
@property (strong, nonatomic) AVCaptureVideoDataOutput *videoOutput;
@property (strong, nonatomic) UIView *blurCameraView;
@property (strong, nonatomic) CIFilter *filter;
@property BOOL cameraOpen;

@end

@implementation ViewController
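
// Heavier per-frame path: builds a fresh monochrome filter and a fresh Gaussian-blur
// filter for every frame delivered by the capture session, then renders two CGImages
// (a sharp central crop and a blurred copy) from the full-size frame.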
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // Get a CMSampleBuffer's Core Video image buffer for the media data
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // turn buffer into an image we can manipulate
    CIImage *result = [CIImage imageWithCVPixelBuffer:imageBuffer];

    // desaturate the frame first
    CIFilter *grayscaleFilter = [CIFilter filterWithName:@"CIColorMonochrome"];
    [grayscaleFilter setDefaults];
    [grayscaleFilter setValue:result forKey:@"inputImage"];
    [grayscaleFilter setValue:[CIColor colorWithRed:0.6f green:0.6f blue:0.6f alpha:1.0f] forKey:@"inputColor"];
    result = grayscaleFilter.outputImage;

    // blur filter
    CIFilter *filter = [CIFilter filterWithName:@"CIGaussianBlur"];
    [filter setDefaults];
    [filter setValue:result forKey:@"inputImage"];
    CGFloat inputRadius = 10.0f;
    [filter setValue:@(inputRadius) forKey:@"inputRadius"];

    // CIGaussianBlur feathers outward past the original extent, so inset the
    // crop rect by the radius to trim the soft border.
    CGRect rect = result.extent;
    rect.origin.x += inputRadius;
    rect.origin.y += inputRadius;
    rect.size.width -= 2 * inputRadius;
    rect.size.height -= 2 * inputRadius;

    // a smaller central region of the unblurred frame for the sharp preview
    CGRect originalRect = rect;
    originalRect.origin.x += 80;
    originalRect.origin.y += 80;
    originalRect.size.width -= 160;
    originalRect.size.height -= 160;

    // render image
    CGImageRef blurredImage = [self.coreImageContext createCGImage:filter.outputImage fromRect:rect];
    CGImageRef originalImage = [self.coreImageContext createCGImage:result fromRect:originalRect];
    dispatch_async(dispatch_get_main_queue(), ^{
        self.cameraView.layer.contentsGravity = kCAGravityResizeAspectFill;
        self.cameraView.layer.contents = (__bridge id)originalImage;
        self.blurCameraView.layer.contents = (__bridge id)blurredImage;
        CGImageRelease(originalImage);
        CGImageRelease(blurredImage);
    });
}
- (void)viewDidLoad {
    [super viewDidLoad];

    self.blurCameraView = [[UIView alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
    [self.view addSubview:self.blurCameraView];

    // setup filter
    self.filter = [CIFilter filterWithName:@"CIGaussianBlur"];
    [self.filter setDefaults];
    [self.filter setValue:@(3.0f) forKey:@"inputRadius"];

    [self setupCamera];
    [self openCamera];
    // Do any additional setup after loading the view, typically from a nib.
}
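
// Starts the session and fades the preview views in. cameraView, viewfinderMaskImageView,
// shutterView, and the showShootButton/hideShootButton helpers are not declared in this
// file and are assumed to come from ViewController.h.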
- (void)openCamera
{
    if (self.cameraOpen) {
        return;
    }

    self.cameraView.alpha = 0.0f;
    self.blurCameraView.alpha = 0.0f;
    self.viewfinderMaskImageView.alpha = 1.0f;
    self.shutterView.alpha = 0.0f;
    [self.cameraSession startRunning];
    [self.view layoutIfNeeded];

    [UIView animateWithDuration:3.0f animations:^{
        self.cameraView.alpha = 1.0f;
        self.blurCameraView.alpha = 1.0f;
        self.viewfinderMaskImageView.alpha = 0.60f;
    }];
    [UIView animateWithDuration:0.2f delay:2.8f options:0 animations:^{
        [self showShootButton];
    } completion:nil];
    self.cameraOpen = YES;
}
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}
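
// Fades the preview views back out and stops the capture session once the fade completes.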
- (void)closeCamera
{
    if (!self.cameraOpen) {
        return; // camera is already closed
    }

    [UIView animateWithDuration:0.5f animations:^{
        self.cameraView.alpha = 0.0f;
        self.blurCameraView.alpha = 0.0f;
        self.viewfinderMaskImageView.alpha = 1.0f;
        self.shutterView.alpha = 0.0f;
    } completion:^(BOOL finished) {
        [self.cameraSession stopRunning];
    }];
    [UIView animateWithDuration:0.2f delay:0.0f options:0 animations:^{
        [self hideShootButton];
    } completion:nil];
    self.cameraOpen = NO;
}
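
// Leaner variant of the session setup: the CIContext is forced onto the CPU renderer
// (kCIContextUseSoftwareRenderer) and the session uses AVCaptureSessionPresetLow, so
// each frame handed to the delegate is much smaller than a photo-preset frame.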
- (void)setupCamera
{
    self.coreImageContext = [CIContext contextWithOptions:@{kCIContextUseSoftwareRenderer : @(YES)}];

    // session
    self.cameraSession = [[AVCaptureSession alloc] init];
    [self.cameraSession setSessionPreset:AVCaptureSessionPresetLow];
    [self.cameraSession commitConfiguration];

    // input
    AVCaptureDevice *shootingCamera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *shootingDevice = [AVCaptureDeviceInput deviceInputWithDevice:shootingCamera error:NULL];
    if ([self.cameraSession canAddInput:shootingDevice]) {
        [self.cameraSession addInput:shootingDevice];
    }

    // video output
    self.videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    self.videoOutput.alwaysDiscardsLateVideoFrames = YES;
    [self.videoOutput setSampleBufferDelegate:self queue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)];
    if ([self.cameraSession canAddOutput:self.videoOutput]) {
        [self.cameraSession addOutput:self.videoOutput];
    }
    if (self.videoOutput.connections.count > 0) {
        AVCaptureConnection *connection = self.videoOutput.connections[0];
        connection.videoOrientation = AVCaptureVideoOrientationPortrait;
    }

    self.cameraOpen = NO;
}
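
// Leaner per-frame path: only the input image of the pre-built blur filter changes each
// frame; a single CGImage is rendered on the capture queue and assigned to the layer
// (and released) on the main queue.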
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    // Get a CMSampleBuffer's Core Video image buffer for the media data
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // turn buffer into an image we can manipulate
    CIImage *result = [CIImage imageWithCVPixelBuffer:imageBuffer];

    // filter
    [self.filter setValue:result forKey:@"inputImage"];

    // render image
    CGImageRef blurredImage = [self.coreImageContext createCGImage:self.filter.outputImage fromRect:result.extent];
    dispatch_async(dispatch_get_main_queue(), ^{
        self.blurCameraView.layer.contents = (__bridge id)blurredImage;
        CGImageRelease(blurredImage);
    });
}
- (void)openCamera {
    if (self.cameraOpen) {
        return;
    }

    self.blurCameraView.alpha = 0.0f;
    [self.cameraSession startRunning];
    [self.view layoutIfNeeded];
    [UIView animateWithDuration:3.0f animations:^{
        self.blurCameraView.alpha = 1.0f;
    }];
    self.cameraOpen = YES;
}

@end