#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

@class CaptureSessionManager;

@interface HabrahabrView : UIView <AVCaptureVideoDataOutputSampleBufferDelegate> {
    CaptureSessionManager *captureFront;
    CaptureSessionManager *captureBack;
    UIImageView *face;
}

// Accessors used below ([self setCaptureBack:], [self captureFront], ...)
@property (nonatomic, retain) CaptureSessionManager *captureFront;
@property (nonatomic, retain) CaptureSessionManager *captureBack;

@end
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    // Get the bytes per row and the dimensions of the pixel buffer
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    unsigned char *pixel = (unsigned char *)CVPixelBufferGetBaseAddress(imageBuffer);

    // Wrap the BGRA pixel data in a bitmap context and turn it into a CGImage
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pixel, width, height, 8, bytesPerRow, colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef image = CGBitmapContextCreateImage(context);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    UIImage *resultUIImage = [UIImage imageWithCGImage:image];
    CGImageRelease(image);

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    // Keep the image alive until the main thread has consumed it
    [resultUIImage retain];
    [self performSelectorOnMainThread:@selector(cameraCaptureGotFrame:) withObject:resultUIImage waitUntilDone:NO];
}

- (void)cameraCaptureGotFrame:(UIImage *)image
{
    face.image = [self fixOrientation:image];
    // Balance the retain taken on the capture queue
    [image release];
}
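The body of fixOrientation: is not quoted here. A minimal sketch of what such a helper might do, assuming a portrait UI and frames that arrive rotated from the camera (the orientation value is an assumption, not taken from the article):

// Hypothetical sketch of fixOrientation: -- the article does not show its body.
// Assumes the frames arrive landscape and simply re-tags them for portrait display.
- (UIImage *)fixOrientation:(UIImage *)image
{
    return [UIImage imageWithCGImage:image.CGImage
                               scale:1.0
                         orientation:UIImageOrientationRight];
}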
face = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, 58, 70)];
[self addSubview:face];
[self setCaptureBack:[[[CaptureSessionManager alloc] init] autorelease]];
[[self captureBack] addVideoInput:2 PView:self];
[[self captureBack] addVideoPreviewLayer];
[[[self captureBack] captureSession] setSessionPreset:AVCaptureSessionPresetLow];
[self setCaptureFront:[[[CaptureSessionManager alloc] init] autorelease]];
[[self captureFront] addVideoInput:1 PView:self];
[[self captureFront] addVideoPreviewLayer];
[[[self captureFront] captureSession] setSessionPreset:AVCaptureSessionPresetLow];
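The preview layers are created above but the snippet does not show them being attached to the view. One possible way to do that, assuming the back camera's preview should fill this view (this is an illustration, not the article's exact code):

// Assumption: attach the active camera's preview layer to this view's layer.
AVCaptureVideoPreviewLayer *preview = [[self captureBack] previewLayer];
preview.frame = self.bounds;
[self.layer addSublayer:preview];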
#import "CaptureSessionManager.h" @implementation CaptureSessionManager @synthesize captureSession; @synthesize previewLayer; - (id)init { if ((self = [super init])) { [self setCaptureSession:[[AVCaptureSession alloc] init]]; } return self; } - (void)addVideoPreviewLayer { [self setPreviewLayer:[[[AVCaptureVideoPreviewLayer alloc] initWithSession:[self captureSession]] autorelease]]; [[self previewLayer] setVideoGravity:AVLayerVideoGravityResizeAspectFill]; } - (void)addVideoInput:(int)camType PView:(HabrahabrView*) habraview { NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; AVCaptureDevice *videoDevice = nil; NSInteger side = (camType==1) ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack; for (AVCaptureDevice *device in videoDevices) { if (device.position == side) { videoDevice = device; break; } } if (videoDevice == nil) { videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; } if (videoDevice) { NSError *error; AVCaptureDeviceInput *videoIn = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error]; if (!error) { if ([[self captureSession] canAddInput:videoIn]) [[self captureSession] addInput:videoIn]; else NSLog(@"Couldn't add video input"); // Set the output AVCaptureVideoDataOutput* videoOutput = [[AVCaptureVideoDataOutput alloc] init]; // create a queue to run the capture on dispatch_queue_t captureQueue=dispatch_queue_create("catpureQueue", NULL); // setup our delegate [videoOutput setSampleBufferDelegate:habraview queue:captureQueue]; // configure the pixel format videoOutput.videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], (id)kCVPixelBufferPixelFormatTypeKey, nil]; if ([[self captureSession] canAddOutput:videoOutput]) [[self captureSession] addOutput:videoOutput]; else NSLog(@"Couldn't add video ouput"); } else NSLog(@"Couldn't create video input"); } else NSLog(@"Couldn't create video capture device"); } - (void)dealloc { [[self captureSession] stopRunning]; [previewLayer release], previewLayer = nil; [captureSession release], captureSession = nil; [super dealloc]; } @end
[[captureFront captureSession] startRunning];
[[captureFront captureSession] stopRunning];
[[captureBack captureSession] startRunning];
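As the snippets above show, switching cameras comes down to stopping one session before starting the other. A small convenience wrapper (the method name is illustrative, not from the article) could look like:

// Hypothetical helper: switch the live capture from the front camera to the back camera.
- (void)switchToBackCamera
{
    [[captureFront captureSession] stopRunning];
    [[captureBack captureSession] startRunning];
}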
Source: https://habr.com/ru/post/148609/