//
//  VideoCaptureDevice.m
//  IDLFaceSDKDemoOC
//
//  Created by 阿凡树 on 2017/5/23.
//  Copyright © 2017年 Baidu. All rights reserved.
//
#import "VideoCaptureDevice.h"
  9. @interface VideoCaptureDevice () <AVCaptureVideoDataOutputSampleBufferDelegate> {
  10. dispatch_queue_t _videoBufferQueue;
  11. }
  12. @property (nonatomic, readwrite, retain) AVCaptureSession *captureSession;
  13. @property (nonatomic, readwrite, retain) AVCaptureDevice *captureDevice;
  14. @property (nonatomic, readwrite, retain) AVCaptureDeviceInput *captureInput;
  15. @property (nonatomic, readwrite, retain) AVCaptureVideoDataOutput *videoDataOutput;
  16. @property (nonatomic, readwrite, assign) BOOL isSessionBegin;
  17. @end
  18. @implementation VideoCaptureDevice
  19. - (void)setPosition:(AVCaptureDevicePosition)position {
  20. if (_position ^ position) {
  21. _position = position;
  22. if (self.isSessionBegin) {
  23. [self resetSession];
  24. }
  25. }
  26. }
  27. - (instancetype)init {
  28. if (self = [super init]) {
  29. _captureSession = [[AVCaptureSession alloc] init];
  30. _videoBufferQueue = dispatch_queue_create("video_buffer_handle_queue", NULL);
  31. _isSessionBegin = NO;
  32. _position = AVCaptureDevicePositionFront;
  33. }
  34. return self;
  35. }
  36. - (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition) position {
  37. NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
  38. for (AVCaptureDevice *device in devices) {
  39. if ([device position] == position) {
  40. return device;
  41. }
  42. }
  43. return nil;
  44. }
  45. - (void)startSession {
  46. #if TARGET_OS_SIMULATOR
  47. NSLog(@"模拟器没有摄像头,此功能只有真机可用");
  48. #else
  49. if (self.captureSession.running) {
  50. return;
  51. }
  52. if (!self.isSessionBegin) {
  53. self.isSessionBegin = YES;
  54. // 配置相机设备
  55. _captureDevice = [self cameraWithPosition:_position];
  56. // 初始化输入
  57. NSError *error = nil;
  58. _captureInput = [[AVCaptureDeviceInput alloc] initWithDevice:_captureDevice error:&error];
  59. if (error == nil) {
  60. [_captureSession addInput:_captureInput];
  61. } else {
  62. if ([self.delegate respondsToSelector:@selector(captureError)]) {
  63. [self.delegate captureError];
  64. }
  65. }
  66. // 输出设置
  67. _videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
  68. _videoDataOutput.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_32BGRA)};
  69. [_videoDataOutput setSampleBufferDelegate:self queue:_videoBufferQueue];
  70. [_captureSession addOutput:_videoDataOutput];
  71. AVCaptureConnection* connection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
  72. connection.videoOrientation = AVCaptureVideoOrientationPortrait;
  73. // 调节摄像头翻转
  74. connection.videoMirrored = (_position == AVCaptureDevicePositionFront);
  75. [self.captureSession startRunning];
  76. }
  77. #endif
  78. }
  79. - (void)stopSession {
  80. #if TARGET_OS_SIMULATOR
  81. NSLog(@"模拟器没有摄像头,此功能只有真机可用");
  82. #else
  83. if (!self.captureSession.running) {
  84. return;
  85. }
  86. if(self.isSessionBegin){
  87. self.isSessionBegin = NO;
  88. [self.captureSession stopRunning];
  89. if(nil != self.captureInput){
  90. [self.captureSession removeInput:self.captureInput];
  91. }
  92. if(nil != self.videoDataOutput){
  93. [self.captureSession removeOutput:self.videoDataOutput];
  94. }
  95. }
  96. #endif
  97. }
  98. - (void)resetSession {
  99. [self stopSession];
  100. [self startSession];
  101. }
  102. #pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
  103. - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
  104. if (!_runningStatus) {
  105. return;
  106. }
  107. UIImage* sampleImage = [self imageFromSamplePlanerPixelBuffer:sampleBuffer];
  108. if ([self.delegate respondsToSelector:@selector(captureOutputSampleBuffer:)] && sampleImage != nil) {
  109. [self.delegate captureOutputSampleBuffer:sampleImage];
  110. }
  111. }
  112. /**
  113. * 把 CMSampleBufferRef 转化成 UIImage 的方法,参考自:
  114. * https://stackoverflow.com/questions/19310437/convert-cmsamplebufferref-to-uiimage-with-yuv-color-space
  115. * note1 : SDK要求 colorSpace 为 CGColorSpaceCreateDeviceRGB
  116. * note2 : SDK需要 ARGB 格式的图片
  117. */
  118. - (UIImage *) imageFromSamplePlanerPixelBuffer:(CMSampleBufferRef)sampleBuffer{
  119. @autoreleasepool {
  120. // Get a CMSampleBuffer's Core Video image buffer for the media data
  121. CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  122. // Lock the base address of the pixel buffer
  123. CVPixelBufferLockBaseAddress(imageBuffer, 0);
  124. // Get the number of bytes per row for the plane pixel buffer
  125. void *baseAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
  126. // Get the number of bytes per row for the plane pixel buffer
  127. size_t bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer,0);
  128. // Get the pixel buffer width and height
  129. size_t width = CVPixelBufferGetWidth(imageBuffer);
  130. size_t height = CVPixelBufferGetHeight(imageBuffer);
  131. // Create a device-dependent RGB color space
  132. CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
  133. // Create a bitmap graphics context with the sample buffer data
  134. CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
  135. bytesPerRow, colorSpace, kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little);
  136. // Create a Quartz image from the pixel data in the bitmap graphics context
  137. CGImageRef quartzImage = CGBitmapContextCreateImage(context);
  138. // Unlock the pixel buffer
  139. CVPixelBufferUnlockBaseAddress(imageBuffer,0);
  140. // Free up the context and color space
  141. CGContextRelease(context);
  142. CGColorSpaceRelease(colorSpace);
  143. // Create an image object from the Quartz image
  144. UIImage *image = [UIImage imageWithCGImage:quartzImage];
  145. // Release the Quartz image
  146. CGImageRelease(quartzImage);
  147. return (image);
  148. }
  149. }
  150. @end