目的:打開、關閉前置攝像頭,繪制圖像,并獲取攝像頭的二進制數據。
需要的庫
AVFoundation.framework、CoreVideo.framework、CoreMedia.framework、QuartzCore.framework
該攝像頭捕捉必須編譯真機的版本,模擬器下編譯不了。
函數說明
-?(void)createControl
{
//?UI界面控件的創建
}
-?(AVCaptureDevice?*)getFrontCamera;
獲取前置攝像頭設備
-?(void)startVideoCapture;
打開攝像頭并開始捕捉圖像
其中代碼:
AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self->avCaptureSession];
previewLayer.frame = localView.bounds;
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self->localView.layer addSublayer:previewLayer];
為把圖片畫到UIView里面
-?(void)stopVideoCapture:(id)arg;
關閉攝像頭,停止捕捉圖像
其中代碼:
for (UIView *view in self->localView.subviews) {
    [view removeFromSuperview];
}
為移除攝像頭圖像的View
詳情見代碼,代碼拷過去可以直接使用??????Over!!!!
代碼:
頭文件:
//
//??AVCallController.h
//??Pxlinstall
//
//??Created?by?Lin?Charlie?C.?on?11-3-24.
//??Copyright?2011??xxxx.?All?rights?reserved.
//
#import?<UIKit/UIKit.h>
#import?<AVFoundation/AVFoundation.h>
// View controller that opens/closes the front camera, draws a live
// preview, and receives raw frame data via the sample-buffer delegate.
@interface AVCallController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate>
{
    // UI
    UILabel *labelState;       // status text shown to the user
    UIButton *btnStartVideo;   // "Start" button
    UIView *localView;         // container that hosts the camera preview layer

    AVCaptureSession *avCaptureSession; // capture session; non-nil only while capturing
    AVCaptureDevice *avCaptureDevice;   // camera device in use; non-nil only while capturing
    BOOL firstFrame;   // YES until the first captured frame has been inspected
    int producerFps;   // requested capture frame rate (frames per second)
}
@property (nonatomic, retain) AVCaptureSession *avCaptureSession;
@property (nonatomic, retain) UILabel *labelState;

// Builds the UI: status label, Start/Stop buttons, preview container.
- (void)createControl;
// Returns the front-facing camera, or the default video device if none exists.
- (AVCaptureDevice *)getFrontCamera;
// Opens the camera and starts capturing frames + showing the preview.
- (void)startVideoCapture;
// Stops capturing and removes the preview from localView.
- (void)stopVideoCapture:(id)arg;
@end
/
/
/
實現文件:
????//
//??AVCallController.m
//??Pxlinstall
//
//??Created?by?Lin?Charlie?C.?on?11-3-24.
//??Copyright?2011??高鴻移通.?All?rights?reserved.
//
#import?"AVCallController.h"
@implementation?AVCallController
@synthesize?avCaptureSession;
@synthesize?labelState;
//?The?designated?initializer.??Override?if?you?create?the?controller?programmatically?and?want?to?perform?customization?that?is?not?appropriate?for?viewDidLoad.
/*
-?(id)initWithNibName:(NSString?*)nibNameOrNil?bundle:(NSBundle?*)nibBundleOrNil?{
????self?=?[super?initWithNibName:nibNameOrNil?bundle:nibBundleOrNil];
????if?(self)?{
????????//?Custom?initialization.
????}
????return?self;
}
*/
// Designated initializer: establishes capture defaults before any UI exists.
- (id)init
{
    if (self = [super init])
    {
        firstFrame = YES;   // treat the next captured frame as the first
        producerFps = 50;   // requested frames per second for the video output
    }
    return self;
}
//?Implement?loadView?to?create?a?view?hierarchy?programmatically,?without?using?a?nib.
// Builds the view hierarchy programmatically (no nib).
- (void)loadView {
    [super loadView];
    [self createControl];
}
/*
//?Implement?viewDidLoad?to?do?additional?setup?after?loading?the?view,?typically?from?a?nib.
-?(void)viewDidLoad?{
????[super?viewDidLoad];
}
*/
/*
//?Override?to?allow?orientati*****?other?than?the?default?portrait?orientation.
-?(BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation?{
????//?Return?YES?for?supported?orientati*****.
????return?(interfaceOrientation?==?UIInterfaceOrientationPortrait);
}
*/
- (void)didReceiveMemoryWarning {
    // Releases the view if it doesn't have a superview.
    [super didReceiveMemoryWarning];
    // Release any cached data, images, etc. that aren't in use.
}
- (void)viewDidUnload {
    [super viewDidUnload];
    // Release any retained subviews of the main view.
    // e.g. self.myOutlet = nil;
}
- (void)dealloc {
    // MRC: the session alloc'ed in -startVideoCapture is owned by this
    // object; release it in case the controller is destroyed while still
    // capturing (sending release to nil is a harmless no-op otherwise).
    // The UI ivars were released right after addSubview: and are owned by
    // the view hierarchy, so they must NOT be released here.
    [avCaptureSession release];
    [super dealloc];
}
#pragma?mark?-
#pragma?mark?createControl
// Builds the screen: a status label, Start/Stop buttons, and the container
// view that will host the camera preview layer.
// MRC note: each control is released immediately after addSubview:, leaving
// the view hierarchy as the sole owner.
- (void)createControl
{
    self.view.backgroundColor = [UIColor grayColor];

    labelState = [[UILabel alloc] initWithFrame:CGRectMake(10, 20, 220, 30)];
    labelState.backgroundColor = [UIColor clearColor];
    [self.view addSubview:labelState];
    [labelState release];

    btnStartVideo = [[UIButton alloc] initWithFrame:CGRectMake(20, 350, 80, 50)];
    // Title fixed from the original "Star" (typo).
    [btnStartVideo setTitle:@"Start" forState:UIControlStateNormal];
    [btnStartVideo setBackgroundImage:[UIImage imageNamed:@"Images/button.png"]
                             forState:UIControlStateNormal];
    [btnStartVideo addTarget:self
                      action:@selector(startVideoCapture)
            forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:btnStartVideo];
    [btnStartVideo release];

    UIButton *stop = [[UIButton alloc] initWithFrame:CGRectMake(120, 350, 80, 50)];
    [stop setTitle:@"Stop" forState:UIControlStateNormal];
    [stop setBackgroundImage:[UIImage imageNamed:@"Images/button.png"]
                    forState:UIControlStateNormal];
    [stop addTarget:self
              action:@selector(stopVideoCapture:)
    forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:stop];
    [stop release];

    localView = [[UIView alloc] initWithFrame:CGRectMake(40, 50, 200, 300)];
    [self.view addSubview:localView];
    [localView release];
}
#pragma?mark?-
#pragma?mark?VideoCapture
// Return the front-facing camera if the device has one; otherwise fall
// back to the default video capture device.
- (AVCaptureDevice *)getFrontCamera
{
    NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *candidate in videoDevices) {
        if (candidate.position == AVCaptureDevicePositionFront) {
            return candidate;
        }
    }
    return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}
// Opens the front camera, builds a low-preset capture session whose frame
// callbacks run on a private serial queue, and attaches a live preview
// layer to localView. No-op (with a status message) if already capturing.
- (void)startVideoCapture
{
    [labelState setText:@"Starting Video stream"];
    if (self->avCaptureDevice || self->avCaptureSession)
    {
        [labelState setText:@"Already capturing"];
        return;
    }
    if ((self->avCaptureDevice = [self getFrontCamera]) == nil)
    {
        // Message fixed from the original "valide" (typo).
        [labelState setText:@"Failed to get valid capture device"];
        return;
    }

    NSError *error = nil;
    AVCaptureDeviceInput *videoInput =
        [AVCaptureDeviceInput deviceInputWithDevice:self->avCaptureDevice error:&error];
    if (!videoInput)
    {
        [labelState setText:@"Failed to get video input"];
        self->avCaptureDevice = nil;
        return;
    }

    self->avCaptureSession = [[AVCaptureSession alloc] init]; // owned (MRC)
    self->avCaptureSession.sessionPreset = AVCaptureSessionPresetLow;
    [self->avCaptureSession addInput:videoInput];

    // Currently, the only supported videoSettings key is
    // kCVPixelBufferPixelFormatTypeKey. Recommended pixel format choices are
    // kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange or kCVPixelFormatType_32BGRA.
    // On iPhone 3G, the recommended choices are kCVPixelFormatType_422YpCbCr8
    // or kCVPixelFormatType_32BGRA.
    AVCaptureVideoDataOutput *avCaptureVideoDataOutput =
        [[AVCaptureVideoDataOutput alloc] init];
    NSDictionary *settings = [[NSDictionary alloc] initWithObjectsAndKeys:
        //[NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey,
        [NSNumber numberWithInt:240], (id)kCVPixelBufferWidthKey,
        [NSNumber numberWithInt:320], (id)kCVPixelBufferHeightKey,
        nil];
    avCaptureVideoDataOutput.videoSettings = settings;
    [settings release];
    avCaptureVideoDataOutput.minFrameDuration = CMTimeMake(1, self->producerFps);

    /* Serial queue on which the sample-buffer delegate callbacks run. */
    dispatch_queue_t queue = dispatch_queue_create("org.doubango.idoubs", NULL);
    [avCaptureVideoDataOutput setSampleBufferDelegate:self queue:queue];
    [self->avCaptureSession addOutput:avCaptureVideoDataOutput];
    [avCaptureVideoDataOutput release];
    dispatch_release(queue);

    // Live preview rendered into localView's backing layer.
    AVCaptureVideoPreviewLayer *previewLayer =
        [AVCaptureVideoPreviewLayer layerWithSession:self->avCaptureSession];
    previewLayer.frame = localView.bounds;
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self->localView.layer addSublayer:previewLayer];

    self->firstFrame = YES;
    [self->avCaptureSession startRunning];
    [labelState setText:@"Video capture started"];
}
// Stops the capture session and removes the live preview from localView.
- (void)stopVideoCapture:(id)arg
{
    if (self->avCaptureSession) {
        [self->avCaptureSession stopRunning];
        // MRC fix: the original set the ivar to nil without releasing,
        // leaking the session alloc'ed in -startVideoCapture.
        [self->avCaptureSession release];
        self->avCaptureSession = nil;
        [labelState setText:@"Video capture stopped"];
    }
    self->avCaptureDevice = nil; // never retained by us; just forget it

    // Fix: the preview was attached with addSublayer: (a CALayer, not a
    // UIView), so removing subviews alone left the frozen preview visible.
    // Enumerate a copy because removeFromSuperlayer mutates the array.
    NSArray *sublayers = [[self->localView.layer.sublayers copy] autorelease];
    for (CALayer *layer in sublayers) {
        [layer removeFromSuperlayer];
    }
    // Also clear any subviews that may have been added to the container.
    for (UIView *view in self->localView.subviews) {
        [view removeFromSuperview];
    }
}
#pragma?mark?-
#pragma?mark?AVCaptureVideoDataOutputSampleBufferDelegate
// Sample-buffer delegate: called on the private capture queue for every
// captured frame. Locks the pixel buffer, exposes its raw bytes, and logs
// the frame geometry/pixel format once (on the first frame).
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    /* Lock the buffer before touching its memory. */
    if (CVPixelBufferLockBaseAddress(pixelBuffer, 0) == kCVReturnSuccess)
    {
        // Raw frame bytes — this is the camera's binary data; process as needed.
        UInt8 *bufferPtr = (UInt8 *)CVPixelBufferGetBaseAddress(pixelBuffer);
        size_t bufferSize = CVPixelBufferGetDataSize(pixelBuffer);
        (void)bufferPtr;  // silence unused-variable warnings until a consumer
        (void)bufferSize; // is wired in
        if (self->firstFrame)
        {
            // First frame only: report width/height and pixel format type.
            // size_t/OSType fixed from the original's narrowing int casts.
            size_t width = CVPixelBufferGetWidth(pixelBuffer);
            size_t height = CVPixelBufferGetHeight(pixelBuffer);
            (void)width;
            (void)height;
            OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
            switch (pixelFormat) {
                case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
                    //TMEDIA_PRODUCER(producer)->video.chroma = tmedia_nv12; // iPhone 3GS or 4
                    NSLog(@"Capture pixel format=NV12");
                    break;
                case kCVPixelFormatType_422YpCbCr8:
                    //TMEDIA_PRODUCER(producer)->video.chroma = tmedia_uyvy422; // iPhone 3
                    NSLog(@"Capture pixel format=UYUY422");
                    break;
                default:
                    //TMEDIA_PRODUCER(producer)->video.chroma = tmedia_rgb32;
                    NSLog(@"Capture pixel format=RGB32");
                    break;
            }
            self->firstFrame = NO;
        }
        /* Unlock once we are done with the buffer memory. */
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    }

    /* Reference (kept from the original, commented out): converting the frame
       to a CGImage/UIImage and displaying it. We are not on the main queue,
       so an autorelease pool is needed and all UIKit work must be dispatched
       to the main thread. */
    // NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
    //
    // CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // /*Lock the image buffer*/
    // CVPixelBufferLockBaseAddress(imageBuffer, 0);
    // /*Get information about the image*/
    // uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    // size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    // size_t width = CVPixelBufferGetWidth(imageBuffer);
    // size_t height = CVPixelBufferGetHeight(imageBuffer);
    //
    // /*Create a CGImageRef from the CVImageBufferRef*/
    // CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    // CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    //
    // /*We release some components*/
    // CGContextRelease(newContext);
    // CGColorSpaceRelease(colorSpace);
    //
    // /*Display on the custom layer: UIKit is not thread-safe and we are not
    //   on the main thread, so use performSelectorOnMainThread.*/
    // [self.customLayer performSelectorOnMainThread:@selector(setContents:) withObject:(id)newImage waitUntilDone:YES];
    //
    // /*Display on the image view (rotate so the video is shown correctly).*/
    // UIImage *image = [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];
    //
    // /*We release the CGImageRef*/
    // CGImageRelease(newImage);
    //
    // [self.imageView performSelectorOnMainThread:@selector(setImage:) withObject:image waitUntilDone:YES];
    //
    // /*We unlock the image buffer*/
    // CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    //
    // [pool drain];
}
@end
楊航收集技術資料,分享給大家