I don't know why the raw data from my capture comes back null. The input and output of the capture session seem to be declared correctly. I am trying to get the R, G, B values of a specific pixel from the live camera view, and I am not sure which part of this code is failing; two sketches of what I suspect follow the code.
#import "PCamViewController.h"
@interface PCamViewController ()
@end
@implementation PCamViewController
- (void)viewDidLoad
{
    [super viewDidLoad];
    [self startSession];
}
// Lazily create the capture session.
- (AVCaptureSession *)session
{
    if (!_session) {
        _session = [[AVCaptureSession alloc] init];
    }
    return _session;
}
// Lazily create the device input for the default video camera.
- (AVCaptureDeviceInput *)deviceInput
{
    if (!_deviceInput) {
        NSError *error = nil;
        _deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]
                                                             error:&error];
        if (error) {
            NSLog(@"Could not create device input: %@", error);
        }
    }
    return _deviceInput;
}
- (void)startSession
{
    [self.session addInput:self.deviceInput];

    // Show the live camera feed in captureView.
    AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
    [previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
    CALayer *rootLayer = [self.captureView layer];
    [rootLayer setMasksToBounds:YES];
    [previewLayer setFrame:self.captureView.bounds];
    [rootLayer insertSublayer:previewLayer atIndex:0];

    [self.session startRunning];

    stillImageOutput = [AVCaptureStillImageOutput new];
    if ([self.session canAddOutput:stillImageOutput]) {
        [self.session addOutput:stillImageOutput];
    }
    NSLog(@"session activated");
}
- (IBAction)captureImage:(UIButton *)sender
{
    NSLog(@"captureImage button pushed");
    AVCaptureConnection *stillImageConnection = [stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
    [stillImageOutput captureStillImageAsynchronouslyFromConnection:stillImageConnection
                                                  completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
        CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(imageSampleBuffer);
        CVPixelBufferLockBaseAddress(cameraFrame, 0);
        uint8_t *rawImageBytes = CVPixelBufferGetBaseAddress(cameraFrame);
        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(cameraFrame);
        NSData *dataForRawBytes = [NSData dataWithBytes:rawImageBytes
                                                 length:bytesPerRow * CVPixelBufferGetHeight(cameraFrame)];
        // Do whatever with your bytes
        NSLog(@"%@", dataForRawBytes);
        // %p, not %@: rawImageBytes is a raw pointer, not an object.
        NSLog(@"%p", rawImageBytes);
        //NSLog(@"%@", imageSampleBuffer);
        CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
    }];
}
@end
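
A likely culprit, offered as an assumption rather than something I have confirmed: AVCaptureStillImageOutput emits JPEG-compressed sample buffers by default, and CMSampleBufferGetImageBuffer returns NULL for a compressed buffer because no CVPixelBuffer is attached to it. A minimal sketch of the change, requesting uncompressed 32BGRA frames where the output is created in startSession:

    // Sketch, assuming the default JPEG encoding is the problem: ask for
    // uncompressed 32BGRA frames so the completion handler's sample buffer
    // carries a real CVPixelBuffer.
    stillImageOutput = [AVCaptureStillImageOutput new];
    stillImageOutput.outputSettings =
        @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
    if ([self.session canAddOutput:stillImageOutput]) {
        [self.session addOutput:stillImageOutput];
    }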
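
With an uncompressed BGRA buffer, reading one pixel is plain pointer arithmetic. Below is a sketch for the body of the completion handler; x and y are hypothetical coordinates, and the NULL check guards the case that is currently failing. Note the byte order for kCVPixelFormatType_32BGRA is B, G, R, A.

    CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(imageSampleBuffer);
    if (cameraFrame == NULL) {
        NSLog(@"No pixel buffer attached; output may still be JPEG-encoded");
        return;
    }
    CVPixelBufferLockBaseAddress(cameraFrame, 0);
    uint8_t *base = CVPixelBufferGetBaseAddress(cameraFrame);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(cameraFrame);
    size_t x = 10, y = 20;                           // hypothetical pixel coordinates
    uint8_t *pixel = base + y * bytesPerRow + x * 4; // 4 bytes per 32BGRA pixel
    NSLog(@"pixel (%zu, %zu): r=%d g=%d b=%d", x, y, pixel[2], pixel[1], pixel[0]);
    CVPixelBufferUnlockBaseAddress(cameraFrame, 0);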