
Obtaining the iPhone Camera Device


Goal: open and close the front camera, render the live image, and obtain the camera's raw binary frame data.

Required frameworks

AVFoundation.framework, CoreVideo.framework, CoreMedia.framework, QuartzCore.framework

Camera capture only builds and runs on a real device; it cannot be exercised in the simulator.
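Because a simulator build has no camera, it can also help to guard at runtime before starting capture. A minimal sketch (not in the original post) using only standard AVFoundation calls; hasVideoCaptureDevice is a hypothetical helper name:

#import <AVFoundation/AVFoundation.h>

// Returns YES when at least one video capture device exists.
// On the simulator there is none, so this returns NO.
static BOOL hasVideoCaptureDevice(void)
{
    return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo] != nil;
}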

Function descriptions


- (void)createControl
{
    // Create the UI controls
}

- (AVCaptureDevice *)getFrontCamera;
Returns the front-facing camera device.

- (void)startVideoCapture;
Opens the camera and starts capturing frames.

The key code:

AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self->avCaptureSession];
previewLayer.frame = localView.bounds;
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self->localView.layer addSublayer:previewLayer];
 

This renders the camera image into the UIView. AVLayerVideoGravityResizeAspectFill scales the video to fill localView and crops whatever does not fit; AVLayerVideoGravityResizeAspect would letterbox it instead.


- (void)stopVideoCapture:(id)arg;


Closes the camera and stops capturing frames.

The key code:

for (CALayer *layer in [NSArray arrayWithArray:self->localView.layer.sublayers])
{
    [layer removeFromSuperlayer];
}



This tears down the camera preview. Note that the preview layer was added as a sublayer of localView.layer, so it is the sublayers (not the subviews) that must be removed.

The full code follows and can be copied and used as-is.



Code:

Header file:


//

//  AVCallController.h

//  Pxlinstall

//

//  Created by Lin Charlie C. on 11-3-24.

//  Copyright 2011  xxxx. All rights reserved.

//


#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>




@interface AVCallController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate>

{
    // UI
    UILabel *labelState;
    UIButton *btnStartVideo;
    UIView *localView;

    AVCaptureSession *avCaptureSession;
    AVCaptureDevice *avCaptureDevice;
    BOOL firstFrame;   // whether the next frame is the first one
    int producerFps;



}
@property (nonatomic, retain) AVCaptureSession *avCaptureSession;
@property (nonatomic, retain) UILabel *labelState;



- (void)createControl;

- (AVCaptureDevice *)getFrontCamera;

- (void)startVideoCapture;

- (void)stopVideoCapture:(id)arg;

@end
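
As a quick usage sketch (not part of the original post), the controller is created and shown like any other view controller; navController below is only an illustrative name:

// Hypothetical call site, e.g. in the app delegate or a parent controller.
AVCallController *callController = [[AVCallController alloc] init];
[navController pushViewController:callController animated:YES];
[callController release]; // manual retain/release, matching the rest of the post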

/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////


Implementation file:

//

//  AVCallController.m

//  Pxlinstall

//

//  Created by Lin Charlie C. on 11-3-24.

//  Copyright 2011  高鸿移通. All rights reserved.

//


#import "AVCallController.h"




@implementation AVCallController


@synthesize avCaptureSession;
@synthesize labelState;



// The designated initializer.  Override if you create the controller programmatically and want to perform customization that is not appropriate for viewDidLoad.

/*

- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil {

    self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];

    if (self) {

        // Custom initialization.

    }

    return self;

}

*/

- (id)init
{
    if (self = [super init])
    {
        firstFrame = YES;
        producerFps = 50;
    }
    return self;
}



// Implement loadView to create a view hierarchy programmatically, without using a nib.

- (void)loadView {
    [super loadView];
    [self createControl];
}





/*

// Implement viewDidLoad to do additional setup after loading the view, typically from a nib.

- (void)viewDidLoad {

    [super viewDidLoad];

}

*/



/*

// Override to allow orientations other than the default portrait orientation.

- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation {

    // Return YES for supported orientations.

    return (interfaceOrientation == UIInterfaceOrientationPortrait);

}

*/



- (void)didReceiveMemoryWarning {
    // Releases the view if it doesn't have a superview.
    [super didReceiveMemoryWarning];

    // Release any cached data, images, etc. that aren't in use.
}



- (void)viewDidUnload {
    [super viewDidUnload];
    // Release any retained subviews of the main view.
    // e.g. self.myOutlet = nil;
}





- (void)dealloc {

    [super dealloc];

}



#pragma mark -

#pragma mark createControl

- (void)createControl
{
    // Build the UI
    self.view.backgroundColor = [UIColor grayColor];

    labelState = [[UILabel alloc] initWithFrame:CGRectMake(10, 20, 220, 30)];
    labelState.backgroundColor = [UIColor clearColor];
    [self.view addSubview:labelState];
    [labelState release];

    btnStartVideo = [[UIButton alloc] initWithFrame:CGRectMake(20, 350, 80, 50)];
    [btnStartVideo setTitle:@"Start" forState:UIControlStateNormal];
    [btnStartVideo setBackgroundImage:[UIImage imageNamed:@"Images/button.png"] forState:UIControlStateNormal];
    [btnStartVideo addTarget:self action:@selector(startVideoCapture) forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:btnStartVideo];
    [btnStartVideo release];

    UIButton *stop = [[UIButton alloc] initWithFrame:CGRectMake(120, 350, 80, 50)];
    [stop setTitle:@"Stop" forState:UIControlStateNormal];
    [stop setBackgroundImage:[UIImage imageNamed:@"Images/button.png"] forState:UIControlStateNormal];
    [stop addTarget:self action:@selector(stopVideoCapture:) forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:stop];
    [stop release];

    localView = [[UIView alloc] initWithFrame:CGRectMake(40, 50, 200, 300)];
    [self.view addSubview:localView];
    [localView release];
}

#pragma mark -

#pragma mark VideoCapture

- (AVCaptureDevice *)getFrontCamera

{
    // Find the front-facing camera among all video devices
    NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];

    for (AVCaptureDevice *device in cameras)

{

        if (device.position == AVCaptureDevicePositionFront)

            return device;

    }

    return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];


}

- (void)startVideoCapture
{
    // Open the capture device and start grabbing frames
    [labelState setText:@"Starting Video stream"];
    if (self->avCaptureDevice || self->avCaptureSession)
    {
        [labelState setText:@"Already capturing"];
        return;
    }

    if ((self->avCaptureDevice = [self getFrontCamera]) == nil)
    {
        [labelState setText:@"Failed to get valid capture device"];
        return;
    }

    NSError *error = nil;
    AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:self->avCaptureDevice error:&error];
    if (!videoInput)
    {
        [labelState setText:@"Failed to get video input"];
        self->avCaptureDevice = nil;
        return;
    }

    self->avCaptureSession = [[AVCaptureSession alloc] init];
    self->avCaptureSession.sessionPreset = AVCaptureSessionPresetLow;
    [self->avCaptureSession addInput:videoInput];

    // Currently, the only supported key is kCVPixelBufferPixelFormatTypeKey. Recommended pixel format choices are
    // kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange or kCVPixelFormatType_32BGRA.
    // On iPhone 3G, the recommended pixel format choices are kCVPixelFormatType_422YpCbCr8 or kCVPixelFormatType_32BGRA.
    AVCaptureVideoDataOutput *avCaptureVideoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    NSDictionary *settings = [[NSDictionary alloc] initWithObjectsAndKeys:
                              //[NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey,
                              [NSNumber numberWithInt:240], (id)kCVPixelBufferWidthKey,
                              [NSNumber numberWithInt:320], (id)kCVPixelBufferHeightKey,
                              nil];
    avCaptureVideoDataOutput.videoSettings = settings;
    [settings release];
    avCaptureVideoDataOutput.minFrameDuration = CMTimeMake(1, self->producerFps);

    /* We create a serial queue to handle the processing of our frames */
    dispatch_queue_t queue = dispatch_queue_create("org.doubango.idoubs", NULL);
    [avCaptureVideoDataOutput setSampleBufferDelegate:self queue:queue];
    [self->avCaptureSession addOutput:avCaptureVideoDataOutput];
    [avCaptureVideoDataOutput release];
    dispatch_release(queue);

    AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self->avCaptureSession];
    previewLayer.frame = localView.bounds;
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self->localView.layer addSublayer:previewLayer];

    self->firstFrame = YES;
    [self->avCaptureSession startRunning];
    [labelState setText:@"Video capture started"];
}

- (void)stopVideoCapture:(id)arg
{
    // Stop capturing
    if (self->avCaptureSession) {
        [self->avCaptureSession stopRunning];
        [self->avCaptureSession release]; // balance the alloc in startVideoCapture
        self->avCaptureSession = nil;
        [labelState setText:@"Video capture stopped"];
    }
    self->avCaptureDevice = nil;

    // Remove the preview from localView. It was added as a sublayer of
    // localView.layer, so remove sublayers rather than subviews.
    for (CALayer *layer in [NSArray arrayWithArray:self->localView.layer.sublayers])
    {
        [layer removeFromSuperlayer];
    }
}

#pragma mark -

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // Captured frames are delivered here; process them however you need.
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    /* Lock the buffer */
    if (CVPixelBufferLockBaseAddress(pixelBuffer, 0) == kCVReturnSuccess)
    {
        // Raw frame bytes and their total size
        UInt8 *bufferPtr = (UInt8 *)CVPixelBufferGetBaseAddress(pixelBuffer);
        size_t bufferSize = CVPixelBufferGetDataSize(pixelBuffer);

        if (self->firstFrame)
        {
            // The first frame tells us the width, height, and pixel format.
            int width = (int)CVPixelBufferGetWidth(pixelBuffer);
            int height = (int)CVPixelBufferGetHeight(pixelBuffer);
            int pixelFormat = (int)CVPixelBufferGetPixelFormatType(pixelBuffer);
            switch (pixelFormat) {
                case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
                    //TMEDIA_PRODUCER(producer)->video.chroma = tmedia_nv12; // iPhone 3GS or 4
                    NSLog(@"Capture pixel format=NV12");
                    break;
                case kCVPixelFormatType_422YpCbCr8:
                    //TMEDIA_PRODUCER(producer)->video.chroma = tmedia_uyvy422; // iPhone 3
                    NSLog(@"Capture pixel format=UYUY422");
                    break;
                default:
                    //TMEDIA_PRODUCER(producer)->video.chroma = tmedia_rgb32;
                    NSLog(@"Capture pixel format=RGB32");
                    break;
            }
            self->firstFrame = NO;
        }

        /* Unlock the buffer */
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    }
/*We create an autorelease pool because as we are not in the main_queue our code is

 not executed in the main thread. So we have to create an autorelease pool for the thread we are in*/

// NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];

// 

//    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); 

//    /*Lock the image buffer*/

//    CVPixelBufferLockBaseAddress(imageBuffer,0); 

//    /*Get information about the image*/

//    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer); 

//    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer); 

//    size_t width = CVPixelBufferGetWidth(imageBuffer); 

//    size_t height = CVPixelBufferGetHeight(imageBuffer);  

//    

//    /*Create a CGImageRef from the CVImageBufferRef*/

//    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); 

//    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);

//    CGImageRef newImage = CGBitmapContextCreateImage(newContext); 

// 

//    /*We release some components*/

//    CGContextRelease(newContext); 

//    CGColorSpaceRelease(colorSpace);

//    

//    /*We display the result on the custom layer. All the display stuff must be done in the main thread because

//  UIKit is no thread safe, and as we are not in the main thread (remember we didn't use the main_queue)

//  we use performSelectorOnMainThread to call our CALayer and tell it to display the CGImage.*/

// [self.customLayer performSelectorOnMainThread:@selector(setContents:) withObject: (id) newImage waitUntilDone:YES];

// 

// /*We display the result on the image view (We need to change the orientation of the image so that the video is displayed correctly).

//  Same thing as for the CALayer we are not in the main thread so ...*/

// UIImage *image= [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];

// 

// /*We relase the CGImageRef*/

// CGImageRelease(newImage);

// 

// [self.imageView performSelectorOnMainThread:@selector(setImage:) withObject:image waitUntilDone:YES];

// 

// /*We unlock the  image buffer*/

// CVPixelBufferUnlockBaseAddress(imageBuffer,0);

// 

// [pool drain];

}

@end
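
The delegate above only logs the format of the first frame. To satisfy the stated goal of obtaining the camera's raw binary data, the bytes can be copied out while the pixel buffer is locked. A minimal sketch (not in the original post); SendFrameBytes is a hypothetical downstream consumer, and this assumes a packed format such as kCVPixelFormatType_32BGRA:

// Place between the Lock/Unlock calls in the delegate above, where
// bufferPtr and bufferSize are valid:
NSData *frame = [NSData dataWithBytes:bufferPtr length:bufferSize];
SendFrameBytes(frame); // hypothetical consumer

Copy while the buffer is locked: bufferPtr must not be dereferenced after CVPixelBufferUnlockBaseAddress. For planar formats such as NV12, walk the planes with CVPixelBufferGetBaseAddressOfPlane instead of copying from the single base address.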
