JXCaptureMedia.m

#import "JXCaptureMedia.h"
#import "UIImage-Extensions.h"

@implementation JXCaptureMedia

dispatch_queue_t cameraProcessingQueue, audioProcessingQueue;

@synthesize captureSession = _capSession, logoImage, saveVideoToImage, curFlashMode, labelTime, logoRect;
@synthesize isRecording = _isRecording, audioSampleRate, audioEncodeBitRate, videoWidth, videoHeight, videoEncodeBitRate, videoFrames, audioChannels, isRecordAudio, isFrontFace, previewRect, previewLayer = _prevLayer, outputFileName, referenceOrientation, videoOrientation, isEditVideo, isOnlySaveFirstImage, outputImageFiles;
/*
 Feature checklist:
 1. Read a file and convert it                          ok
 2. Switch between front and back cameras               ok
 3. Overlay an image or text                            ok
 4. Capture frames and save them as image files         ok
 5. Fill light with auto / on / off modes               ok
 6. Crop images                                         ok
 7. Handle recording stopping or being interrupted      ok
 8. Detect whether a capture device is present          ok
 9. Parameterized input/output settings                 ok
 10. Mix two audio inputs into a single audio track
 11. Memory release                                     ok
 12. Display recording time                             ok
*/
#pragma mark -
#pragma mark Initialization

- (id)init {
    NSLog(@"JXCaptureMedia.init");
    self = [super init];
    if (self) {
        /* Initialize some variables (they might not be initialized depending on what is commented out) */
        _imageView = nil;
        _prevLayer = nil;
        _customLayer = nil;
        curFlashMode = AVCaptureFlashModeAuto;
        audioSampleRate = 22050 * 2;     // audio sample rate
        audioEncodeBitRate = 32 * 1000;  // 32 kbps
        audioChannels = 1;
        videoEncodeBitRate = 300 * 1000; // 300 kbps
        videoFrames = 15;
        videoHeight = 480;
        videoWidth = 480;
        saveVideoToImage = 0;
        isRecordAudio = 1;
        _isPaused = 0;
        _isSendEnd = 0;
        isEditVideo = 0;
        isFrontFace = 1;
        referenceOrientation = AVCaptureVideoOrientationPortrait;
        _startSessionTime.value = 0;
        outputImageFiles = [[NSMutableArray alloc] init];
    }
    return self;
}
- (void)dealloc {
    NSLog(@"JXCaptureMedia.dealloc");
    // [_capSession release];
    // [_captureVideo release];
    // [_captureAudio release];
    // [_prevLayer release];
    // [outputFileName release];
    [outputImageFiles removeAllObjects];
    // [outputImageFiles release];
    //
    // [super dealloc];
}
- (BOOL)initCapture {
    /* Set up the input */
    if ([self cameraCount] <= 0) {
        [g_App performSelector:@selector(showAlert:) withObject:Localized(@"JXAlert_NoCenmar") afterDelay:1];
        // [g_App showAlert:@"No camera available"];
        return 0;
    }
    _capSession = [[AVCaptureSession alloc] init];
    [_capSession beginConfiguration];
    if (isFrontFace)
        _deviceVideo = [AVCaptureDeviceInput deviceInputWithDevice:[self frontFacingCamera]
                                                              error:nil];
    else
        _deviceVideo = [AVCaptureDeviceInput deviceInputWithDevice:[self backFacingCamera]
                                                              error:nil];
    if (!_deviceVideo) {
        [g_App performSelector:@selector(showAlert:) withObject:Localized(@"JX_CanNotopenCenmar") afterDelay:1];
        // [g_App showAlert:@"Cannot open the camera; make sure permission is granted in Privacy -> Camera"];
        return 0;
    }
    AVCaptureDevicePosition position = [[_deviceVideo device] position];
    isFrontFace = position == AVCaptureDevicePositionFront;
    NSLog(@"isFrontFace=%d", isFrontFace);
    /* Set up the output */
    _captureVideo = [[AVCaptureVideoDataOutput alloc] init];
    /* While a frame is being processed in the -captureOutput:didOutputSampleBuffer:fromConnection: delegate method,
       no other frames are added to the queue. If you don't want this behaviour, set the property to NO. */
    _captureVideo.alwaysDiscardsLateVideoFrames = YES;
    /* Specify a minimum duration for each frame (tune this setting to avoid having too many frames waiting
       in the queue, which can cause memory issues). It is the inverse of the maximum frame rate:
       a minimum frame duration of 1/10 second means a maximum frame rate of 10 fps, i.e. we declare that
       we cannot process more than 10 frames per second. */
    _captureVideo.minFrameDuration = CMTimeMake(1, videoFrames);
    NSLog(@"videoEncodeBitRate=%d,%d", videoFrames, videoEncodeBitRate);
    /* Create a serial queue to handle the processing of the frames. */
    // Set the video output to store frames in BGRA (it is supposed to be faster).
    /* NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
       NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
       NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key]; */
    NSDictionary *videoSettings = [[NSDictionary alloc] initWithObjectsAndKeys:
                                   [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], (NSString *)kCVPixelBufferPixelFormatTypeKey,
                                   [NSNumber numberWithInt:videoWidth], (id)kCVPixelBufferWidthKey,
                                   [NSNumber numberWithInt:videoHeight], (id)kCVPixelBufferHeightKey,
                                   nil];
    [_captureVideo setVideoSettings:videoSettings];
    /* And we create a capture session */
    dispatch_queue_t queueVideo;
    queueVideo = dispatch_queue_create("queueVideo", DISPATCH_QUEUE_SERIAL);
    [_captureVideo setSampleBufferDelegate:self queue:queueVideo];
    // dispatch_release(queueVideo);
    [_capSession setSessionPreset:AVCaptureSessionPresetMedium];
    /* Add input and output */
    [_capSession addInput:_deviceVideo];
    [_capSession addOutput:_captureVideo];
    /* We use medium quality; on the iPhone 4 this demo would lag too much, because the conversion to UIImage and CGImage demands too many resources at 720p. */
    // Audio:
    if (isRecordAudio) {
        dispatch_queue_t queueAudio;
        queueAudio = dispatch_queue_create("queueAudio", NULL);
        _deviceAudio = [AVCaptureDeviceInput deviceInputWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio]
                                                              error:nil];
        if (!_deviceAudio) {
            [g_App performSelector:@selector(showAlert:) withObject:Localized(@"JX_CanNotOpenMicr") afterDelay:1];
            return 0;
        }
        _captureAudio = [[AVCaptureAudioDataOutput alloc] init];
        // [_captureAudio setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
        [_captureAudio setSampleBufferDelegate:self queue:queueAudio];
        [_capSession addInput:_deviceAudio];
        [_capSession addOutput:_captureAudio];
        // dispatch_release(queueAudio);
    }
    [_capSession commitConfiguration];
    int temp = AVCaptureVideoOrientationPortrait; // default in case there are no connections
    for (int i = 0; i < [[_captureVideo connections] count]; i++) {
        AVCaptureConnection *p = [[_captureVideo connections] objectAtIndex:i];
        // NSLog(@"p=%d,%d", p.videoOrientation, p.supportsVideoOrientation);
        temp = p.videoOrientation;
        // NSLog(@"p=%f,%d", p.videoMinFrameDuration.value / p.videoMinFrameDuration.timescale, p.videoOrientation);
    }
    self.videoOrientation = temp;
    // [self createNotify];
    NSLog(@"initCapture");
    return 1;
}
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position {
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices) {
        if (device.position == position) {
            return device;
        }
    }
    return nil;
}
- (BOOL)createPreview:(UIView *)parentView {
    BOOL b = [self initCapture];
    if (!b)
        return b;
    if (_maxTime <= 0) {
        _maxTime = 60;
    }
    _prevLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_capSession];
    if (previewRect.size.height == 0 && previewRect.size.width == 0)
        _prevLayer.frame = parentView.bounds;
    else
        _prevLayer.frame = previewRect;
    // _prevLayer.frame = CGRectMake(0, (JX_SCREEN_HEIGHT - JX_SCREEN_WIDTH) / 2, JX_SCREEN_WIDTH, JX_SCREEN_WIDTH);
    _prevLayer.frame = CGRectMake(0, 0, JX_SCREEN_WIDTH, JX_SCREEN_HEIGHT); // NOTE: overrides the frame chosen above
    _prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    parentView.layer.masksToBounds = YES;
    // [parentView.layer addSublayer:_prevLayer];
    [parentView.layer insertSublayer:_prevLayer below:[[parentView.layer sublayers] objectAtIndex:0]];
    [_capSession startRunning];
    return b;
}
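/*
 Illustrative usage sketch (not part of the original class): the minimal call sequence a host
 view controller might use to drive JXCaptureMedia, assuming -createPreview:, -start, -stop and
 the properties used below are exposed in JXCaptureMedia.h. `parentView` and `timeLabel` are
 hypothetical stand-ins for the caller's own views, and "sketch.mp4" is a hypothetical file name.
*/
__attribute__((unused)) static JXCaptureMedia *JXCaptureMediaUsageSketch(UIView *parentView, UILabel *timeLabel) {
    JXCaptureMedia *capture = [[JXCaptureMedia alloc] init];
    capture.outputFileName = [docFilePath stringByAppendingPathComponent:@"sketch.mp4"];
    capture.labelTime = timeLabel;              // optional: the class updates this label with the recording time
    capture.saveVideoToImage = 5;               // optional: save a JPEG snapshot every 5 seconds of video
    if (![capture createPreview:parentView])    // builds the session and starts the live preview
        return nil;
    [capture start];                            // creates the AVAssetWriter and begins appending frames
    // ... later, e.g. when the user taps "stop" or kVideoRecordEndNotifaction fires ...
    [capture stop];                             // finishes writing outputFileName
    return capture;
}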
#pragma mark -
#pragma mark AVCaptureSession delegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    /* We create an autorelease pool because we are not on the main queue, so our code is not
       executed on the main thread and needs its own autorelease pool. */
    // A very dense way to keep track of the time at which this frame occurs relative to the
    // output stream, but it's just an example!
    if (_isRecording && !_isPaused) {
        CMTime t = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer);
        NSLog(@"%lld,%d", t.value, t.timescale);
        if (_startSessionTime.value == 0) {
            // if (_writer.status == AVAssetWriterStatusUnknown && _startSessionTime.value == 0) {
            _startSessionTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            [_writer startWriting];
            [_writer startSessionAtSourceTime:_startSessionTime];
            NSLog(@"start=%lld,%d", t.value, t.timescale);
            return;
        }
        if (_writer.status <= AVAssetWriterStatusWriting)
        {
            if (captureOutput == _captureVideo) {
                if (_videoInput.readyForMoreMediaData) {
                    _writeVideoCount++;
                    [self showRecordTime:sampleBuffer];
                    if (_adaptor) {
                        // [_adaptor appendPixelBuffer:[self cutPixelBuffer:sampleBuffer] withPresentationTime:t];
                    } else {
                        if (isEditVideo) {
                            [self changeSample:sampleBuffer];
                            // [self cutSampleBuffer:sampleBuffer];
                            // [self cutPixelBuffer:sampleBuffer];
                        }
                        if (_writer.status <= AVAssetWriterStatusWriting)
                            [_videoInput appendSampleBuffer:sampleBuffer];
                    }
                }
            }
            if (captureOutput == _captureAudio) {
                if (_audioInput.readyForMoreMediaData) {
                    // CMSampleBufferSetOutputPresentationTimeStamp(sampleBuffer, CMTimeMakeWithSeconds(_lastTime, 30));
                    [_audioInput appendSampleBuffer:sampleBuffer];
                    _writeAudioCount++;
                    NSLog(@"audio");
                }
            }
        }
    }
}
- (void)showRecordTime:(CMSampleBufferRef)sampleBuffer {
    CMTime t = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer);
    t = CMTimeSubtract(t, _startSessionTime);
    NSInteger m = (t.value / t.timescale) / 60;
    NSInteger n = (t.value / t.timescale) % 60;
    self.timeLen = t.value / t.timescale;
    NSString *labelTimeStr;
    if (!_isReciprocal) {
        labelTimeStr = [NSString stringWithFormat:@"%.2ld:%.2ld", m, n];
    } else {
        if (_maxTime) {
            NSInteger maxM = (_maxTime - self.timeLen) / 60;
            NSInteger maxN = (_maxTime - self.timeLen) % 60;
            labelTimeStr = [NSString stringWithFormat:@"%ld:%.2ld", maxM, maxN];
        }
    }
    if (labelTime) {
        if (![_lastShowTime isEqualToString:labelTimeStr]) {
            [labelTime performSelectorOnMainThread:@selector(setText:) withObject:labelTimeStr waitUntilDone:YES];
            // [_lastShowTime release];
            _lastShowTime = labelTimeStr;
            // [_lastShowTime retain];
        }
        // if (self.timeLen >= self.maxTime && !_isSendEnd) {
        //     _isSendEnd = YES;
        //     [g_notify postNotificationName:kVideoRecordEndNotifaction object:self userInfo:nil];
        // }
    }
    if (self.timeLen >= self.maxTime && !_isSendEnd) {
        _isSendEnd = YES;
        [g_notify postNotificationName:kVideoRecordEndNotifaction object:self userInfo:nil];
    }
}
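/*
 Worked example of the time formatting above (added for clarity), using the default _maxTime of 60
 set in -createPreview:. At timeLen = 25 seconds, the count-up branch shows "00:25"
 (25/60 = 0 minutes, 25%60 = 25 seconds); the countdown branch (_isReciprocal) shows "0:35",
 since (60-25)/60 = 0 and (60-25)%60 = 35.
*/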
- (void)changeSample:(CMSampleBufferRef)sampleBuffer {
    @autoreleasepool {
        // NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
        /* Lock the image buffer */
        CVImageBufferRef imageBuffer = NULL;
        imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        CVPixelBufferLockBaseAddress(imageBuffer, 0);
        /* Get information about the image */
        uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
        size_t width = CVPixelBufferGetWidth(imageBuffer);
        size_t height = CVPixelBufferGetHeight(imageBuffer);
        /* Create a CGImageRef from the CVImageBufferRef */
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
        CGImageRef newImage = CGBitmapContextCreateImage(newContext);
        if (logoImage)
            CGContextDrawImage(newContext, logoRect, logoImage.CGImage);
        if (saveVideoToImage)
            [self saveToImage:sampleBuffer newImage:newImage];
        // [_customLayer performSelectorOnMainThread:@selector(setContents:) withObject:(id)newImage waitUntilDone:YES];
        // [_imageView performSelectorOnMainThread:@selector(setImage:) withObject:image waitUntilDone:YES];
        CGContextRelease(newContext);
        CGColorSpaceRelease(colorSpace);
        CGImageRelease(newImage);
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        // [pool drain];
    }
}
- (void)saveToImage:(CMSampleBufferRef)sampleBuffer newImage:(CGImageRef)newImage {
    // CMTime n = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    CMTime n = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer);
    n = CMTimeSubtract(n, _startSessionTime);
    NSInteger m = n.value / n.timescale;
    BOOL isSave;
    isSave = m % saveVideoToImage == 0 || m == 0;
    if (isSave && _saveCount < m && m < _maxTime) {
        _saveCount = m;
        NSString *s;
        if (isOnlySaveFirstImage) {
            s = [NSString stringWithFormat:@"%@.jpg", [outputFileName stringByDeletingPathExtension]];
            if (m / saveVideoToImage > 2)
                return;
        } else {
            s = [NSString stringWithFormat:@"%@_%ld.jpg", [outputFileName stringByDeletingPathExtension], (long)m];
        }
        CGRect r;
        size_t side; // renamed from `n` to avoid shadowing the CMTime above
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        _sampleWidth = CVPixelBufferGetWidth(imageBuffer);
        _sampleHeight = CVPixelBufferGetHeight(imageBuffer);
        if (_sampleWidth < _sampleHeight) {
            side = _sampleWidth;
            r = CGRectMake(0, (_sampleHeight - _sampleWidth) / 2, side, side);
        } else {
            side = _sampleHeight;
            r = CGRectMake((_sampleWidth - _sampleHeight) / 2, 0, side, side);
        }
        UIImage *image = [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];
        image = [image imageAtRect:r];
        image = [image imageRotatedByDegrees:90];
        NSData *data = UIImageJPEGRepresentation(image, 1);
        NSLog(@"saveToImage:%@", s);
        [data writeToFile:s atomically:YES];
        [outputImageFiles addObject:s];
        image = nil;
        data = nil;
    }
}
#pragma mark -
#pragma mark Memory management

- (void)viewDidUnload {
    _imageView = nil;
    _customLayer = nil;
    _prevLayer = nil;
}
- (BOOL)createWriter
{
    if (videoEncodeBitRate <= 100)
        videoEncodeBitRate = 300 * 1000;
    if (videoFrames <= 10)
        videoFrames = 15;
    NSLog(@"videoEncodeBitRate=%d,%d", videoFrames, videoEncodeBitRate);
    // NSString *file = [self file];
    if (outputFileName == nil)
        outputFileName = [docFilePath stringByAppendingPathComponent:@"1.mp4"];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFileName])
        [[NSFileManager defaultManager] removeItemAtPath:outputFileName error:NULL];
    NSError *error = nil;
    _writer = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:outputFileName] fileType:AVFileTypeMPEG4 error:&error];
    if (error)
    {
        _writer = nil;
        NSLog(@"%@", error);
        return NO;
    }
    NSDictionary *settings;
    if (isRecordAudio) {
        AudioChannelLayout acl;
        bzero(&acl, sizeof(acl));
        if (audioChannels >= 2)
            acl.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
        else
            acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
        settings = [NSDictionary dictionaryWithObjectsAndKeys:
                    [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                    [NSNumber numberWithFloat:audioSampleRate], AVSampleRateKey,
                    [NSNumber numberWithInt:audioChannels], AVNumberOfChannelsKey,
                    [NSNumber numberWithInt:audioEncodeBitRate], AVEncoderBitRateKey,
                    [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                    nil];
        _audioInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:settings];
        _audioInput.expectsMediaDataInRealTime = YES;
        [_writer addInput:_audioInput];
    }
    NSDictionary *codecSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:videoEncodeBitRate], AVVideoAverageBitRateKey,
                                   [NSNumber numberWithInt:videoFrames], AVVideoMaxKeyFrameIntervalKey,
                                   AVVideoProfileLevelH264Baseline30, AVVideoProfileLevelKey,
                                   nil];
    settings = [NSDictionary dictionaryWithObjectsAndKeys:
                AVVideoCodecH264, AVVideoCodecKey,
                [NSNumber numberWithInt:((int)videoWidth / 16) * 16], AVVideoWidthKey,
                [NSNumber numberWithInt:((int)videoHeight / 16) * 16], AVVideoHeightKey,
                [NSString stringWithString:AVVideoScalingModeResizeAspectFill], AVVideoScalingModeKey,
                codecSettings, AVVideoCompressionPropertiesKey,
                nil];
    _videoInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:settings];
    _videoInput.expectsMediaDataInRealTime = YES;
    [_writer addInput:_videoInput];
    _writer.shouldOptimizeForNetworkUse = YES;
    _videoInput.transform = [self transformFromCurrentVideoOrientationToOrientation:self.referenceOrientation];
    _videoInput.transform = CGAffineTransformMakeRotation(M_PI / 2); // NOTE: overrides the transform computed above
    // _adaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:_videoInput
    //             sourcePixelBufferAttributes:[NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
    //             nil]];
    _adaptor = nil;
    NSLog(@"createWriter");
    return YES;
}
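/*
 Note on the video dimensions above: ((int)videoWidth / 16) * 16 rounds the output width and
 height down to a multiple of 16, the macroblock size H.264 encoders work with. With the
 defaults from -init this is a no-op (480 -> 480); a width of 500, for example, would be
 written as 496.
*/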
- (void)deleteWriter
{
    // [_videoInput release];
    _videoInput = nil;
    // [_audioInput release];
    _audioInput = nil;
    // [_writer release];
    _writer = nil;
}
- (void)start
{
    if (!_isRecording)
    {
        _saveCount = -1;
        _startSessionTime.value = 0;
        if (_writer == nil) {
            if (![self createWriter]) {
                NSLog(@"Setup Writer Failed");
                return;
            }
        }
        if (!_capSession.running)
            [_capSession startRunning];
        _isRecording = YES;
        NSLog(@"start video recording...");
    }
}
- (void)stop
{
    if (_isRecording)
    {
        _isRecording = NO;
        // [_capSession stopRunning];
        [_videoInput markAsFinished];
        [_audioInput markAsFinished];
        if (![_writer finishWriting]) {
            NSLog(@"finishWriting returned NO");
        }
        _videoInput = nil;
        _audioInput = nil;
        _writer = nil;
        _startSessionTime.value = 0;
        NSLog(@"video recording stopped:%d frames,%d audios", _writeVideoCount, _writeAudioCount);
    }
}
- (void)setting {
    AVCaptureConnection *videoConnection = NULL;
    [_capSession beginConfiguration];
    for (AVCaptureConnection *connection in [_captureVideo connections])
    {
        for (AVCaptureInputPort *port in [connection inputPorts])
        {
            if ([[port mediaType] isEqual:AVMediaTypeVideo])
            {
                videoConnection = connection;
            }
        }
    }
    if ([videoConnection isVideoOrientationSupported]) // **Here it is, it's always false**
    {
        [videoConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
    }
    [_capSession commitConfiguration];
}
//- (void)clearQueue {
//    dispatch_queue_t queue = dispatch_queue_create("queueVideo", NULL);
//    dispatch_set_context(queue, (__bridge void *_Nullable)(self));
//    dispatch_set_finalizer_f(queue, _captureVideo);
//    [_captureVideo setSampleBufferDelegate:self queue:queue];
////    dispatch_release(queue);
//}
// Toggle between the front and back camera, if both are present.
- (BOOL)toggleCamera
{
    BOOL success = NO;
    if ([self cameraCount] > 1) {
        NSError *error;
        AVCaptureDeviceInput *newVideoInput = nil;
        AVCaptureDevicePosition position = [[_deviceVideo device] position];
        if (position == AVCaptureDevicePositionBack) {
            newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self frontFacingCamera] error:&error];
            isFrontFace = YES;
        }
        else if (position == AVCaptureDevicePositionFront) {
            newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self backFacingCamera] error:&error];
            isFrontFace = NO;
        }
        if (newVideoInput != nil) {
            [_capSession beginConfiguration];
            [_capSession removeInput:_deviceVideo];
            if ([_capSession canAddInput:newVideoInput])
                [_capSession addInput:newVideoInput];
            else
                [_capSession addInput:_deviceVideo];
            [_capSession commitConfiguration];
            success = YES;
            // [newVideoInput release];
            _deviceVideo = newVideoInput;
        }
    }
    return success;
}
#pragma mark Device Counts
- (NSUInteger)cameraCount
{
    return [[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] count];
}
- (NSUInteger)micCount
{
    return [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] count];
}
// Find a camera with the specified AVCaptureDevicePosition, returning nil if one is not found.
// Find a front-facing camera, returning nil if one is not found.
- (AVCaptureDevice *)frontFacingCamera
{
    return [self cameraWithPosition:AVCaptureDevicePositionFront];
}
// Find a back-facing camera, returning nil if one is not found.
- (AVCaptureDevice *)backFacingCamera
{
    return [self cameraWithPosition:AVCaptureDevicePositionBack];
}
// Find and return an audio device, returning nil if one is not found.
- (AVCaptureDevice *)audioDevice
{
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
    if ([devices count] > 0) {
        return [devices objectAtIndex:0];
    }
    return nil;
}
- (void)createNotify {
    [g_notify addObserver:self selector:@selector(onVideoError:) name:AVCaptureSessionRuntimeErrorNotification object:_capSession];
    [g_notify addObserver:self selector:@selector(onVideoInterrupted:) name:AVCaptureSessionWasInterruptedNotification object:_capSession];
}
- (void)onVideoError:(NSNotification *)notification {
    [self stop];
}
- (void)onVideoInterrupted:(NSNotification *)notification {
    [self stop];
}
- (void)setFlashMode:(AVCaptureFlashMode)n {
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (device.hasFlash) {
        [device lockForConfiguration:nil];
        curFlashMode = n;
        device.torchMode = n;
        device.flashMode = n;
        [device unlockForConfiguration];
    }
}
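/*
 Illustrative sketch (not part of the original class): cycling the fill light through the three
 states the checklist mentions (auto / on / off), assuming curFlashMode is declared in
 JXCaptureMedia.h. AVCaptureFlashMode and AVCaptureTorchMode share the same off/on/auto raw
 values, which is why -setFlashMode: above can assign the same mode to both properties.
*/
__attribute__((unused)) static void JXCycleFlashModeSketch(JXCaptureMedia *capture) {
    switch (capture.curFlashMode) {
        case AVCaptureFlashModeAuto: [capture setFlashMode:AVCaptureFlashModeOn];   break;
        case AVCaptureFlashModeOn:   [capture setFlashMode:AVCaptureFlashModeOff];  break;
        default:                     [capture setFlashMode:AVCaptureFlashModeAuto]; break;
    }
}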
- (BOOL)pause {
    _isPaused = YES;
    return YES; // report success for the declared BOOL return
}
- (BOOL)play {
    _isPaused = NO;
    return YES; // report success for the declared BOOL return
}
- (CGFloat)angleOffsetFromPortraitOrientationToOrientation:(AVCaptureVideoOrientation)orientation
{
    CGFloat angle = 0.0;
    switch (orientation) {
        case AVCaptureVideoOrientationPortrait:
            angle = 0.0;
            break;
        case AVCaptureVideoOrientationPortraitUpsideDown:
            angle = M_PI;
            break;
        case AVCaptureVideoOrientationLandscapeRight:
            angle = -M_PI_2;
            break;
        case AVCaptureVideoOrientationLandscapeLeft:
            angle = M_PI_2;
            break;
        default:
            break;
    }
    return angle;
}
- (CGAffineTransform)transformFromCurrentVideoOrientationToOrientation:(AVCaptureVideoOrientation)orientation
{
    CGAffineTransform transform = CGAffineTransformIdentity;
    // Calculate offsets from an arbitrary reference orientation (portrait)
    CGFloat orientationAngleOffset = [self angleOffsetFromPortraitOrientationToOrientation:orientation];
    CGFloat videoOrientationAngleOffset = [self angleOffsetFromPortraitOrientationToOrientation:self.videoOrientation];
    // Find the difference in angle between the passed-in orientation and the current video orientation
    CGFloat angleOffset = orientationAngleOffset - videoOrientationAngleOffset;
    transform = CGAffineTransformMakeRotation(angleOffset);
    return transform;
}
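/*
 Worked example of the rotation math above: if the capture connection reports
 AVCaptureVideoOrientationLandscapeRight (offset -M_PI_2) and the requested reference
 orientation is AVCaptureVideoOrientationPortrait (offset 0), the difference is
 0 - (-M_PI_2) = M_PI_2, i.e. a 90-degree rotation, which matches the fixed
 CGAffineTransformMakeRotation(M_PI/2) applied in -createWriter.
*/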
// Create a CGImageRef from sample buffer data
- (CGImageRef)cgImageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    return [self cgImageFromImageBuffer:imageBuffer];
}
// Create a CGImageRef from image buffer data
- (CGImageRef)cgImageFromImageBuffer:(CVImageBufferRef)imageBuffer
{
    CVPixelBufferLockBaseAddress(imageBuffer, 0); // Lock the image buffer
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0); // Get information about the image
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    _sampleWidth = CVPixelBufferGetWidth(imageBuffer);
    _sampleHeight = CVPixelBufferGetHeight(imageBuffer);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, _sampleWidth, _sampleHeight, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    return newImage; // caller is responsible for releasing the returned CGImageRef
}
- (CMSampleBufferRef)getSampleBufferUsingCIByCGInput:(CGImageRef)imageRef andProvidedSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    CIImage *theCoreImage = [CIImage imageWithCGImage:imageRef];
    CFDictionaryRef empty; // empty value for attr value.
    CFMutableDictionaryRef attrs;
    empty = CFDictionaryCreate(kCFAllocatorDefault, // our empty IOSurface properties dictionary
                               NULL,
                               NULL,
                               0,
                               &kCFTypeDictionaryKeyCallBacks,
                               &kCFTypeDictionaryValueCallBacks);
    attrs = CFDictionaryCreateMutable(kCFAllocatorDefault,
                                      1,
                                      &kCFTypeDictionaryKeyCallBacks,
                                      &kCFTypeDictionaryValueCallBacks);
    CFDictionarySetValue(attrs,
                         kCVPixelBufferIOSurfacePropertiesKey,
                         empty);
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pixelBuffer;
    OSStatus err = CVPixelBufferCreate(kCFAllocatorSystemDefault, (size_t)theCoreImage.extent.size.width, (size_t)theCoreImage.extent.size.height, kCVPixelFormatType_32BGRA, (__bridge CFDictionaryRef)options, &pixelBuffer);
    if (err)
        NSLog(@"Video failure: CVPixelBufferCreate");
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    CIContext *ciContext = [CIContext contextWithOptions:nil];
    [ciContext render:theCoreImage toCVPixelBuffer:pixelBuffer];
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    CMSampleTimingInfo sampleTime = {
        .duration = CMSampleBufferGetDuration(sampleBuffer),
        .presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer),
        .decodeTimeStamp = CMSampleBufferGetDecodeTimeStamp(sampleBuffer)
    };
    CMVideoFormatDescriptionRef videoInfo = NULL;
    CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &videoInfo);
    CMSampleBufferRef oBuf = NULL;
    err = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, videoInfo, &sampleTime, &oBuf); // the new sample buffer is written into oBuf and returned to the caller
    if (err)
        NSLog(@"Video failure: getSampleBufferUsingCIByCGInput");
    CVPixelBufferRelease(pixelBuffer);
    CFRelease(videoInfo);
    return oBuf;
}
- (void)cutSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    // Return if the sample buffer is invalid
    if (!CMSampleBufferIsValid(sampleBuffer)) {
        return;
    }
    // Get a CGImage from the sample buffer
    CGImageRef fromImage = [self cgImageFromSampleBuffer:sampleBuffer];
    if (!fromImage || (fromImage == NULL)) {
        return;
    }
    CGRect r;
    size_t n;
    if (_sampleWidth < _sampleHeight) {
        n = _sampleWidth;
        r = CGRectMake(0, (_sampleHeight - _sampleWidth) / 2, n, n);
    } else {
        n = _sampleHeight;
        r = CGRectMake((_sampleWidth - _sampleHeight) / 2, 0, n, n);
    }
    CGImageRef toImage = CGImageCreateWithImageInRect(fromImage, r);
    // Convert back into a CMSampleBuffer
    // sampleBuffer = [self getSampleBufferUsingCIByCGInput:toImage andProvidedSampleBuffer:sampleBuffer];
    [self getSampleBufferUsingCIByCGInput:toImage andProvidedSampleBuffer:sampleBuffer];
    // Release data if needed
    CGImageRelease(fromImage);
    CGImageRelease(toImage);
}
// Create a CVPixelBufferRef from a CGImageRef; the caller owns the returned buffer (Create rule).
- (CVPixelBufferRef)CVPixelBufferRefFromUiImage:(CGImageRef)image {
    size_t height = CGImageGetHeight(image);
    size_t width = CGImageGetWidth(image);
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options, &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, width, height, 8, CVPixelBufferGetBytesPerRow(pxbuffer), rgbColorSpace, kCGImageAlphaNoneSkipFirst); // use the buffer's actual row stride
    NSParameterAssert(context);
    CGContextDrawImage(context, CGRectMake(0, 0, width, height), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
- (CVPixelBufferRef)cutPixelBuffer:(CMSampleBufferRef)sampleBuffer {
    // Return if the sample buffer is invalid
    if (!CMSampleBufferIsValid(sampleBuffer)) {
        return NULL;
    }
    // Get a CGImage from the sample buffer
    CGImageRef fromImage = [self cgImageFromSampleBuffer:sampleBuffer];
    if (!fromImage || (fromImage == NULL)) {
        return NULL;
    }
    CGRect r;
    size_t n;
    if (_sampleWidth < _sampleHeight) {
        n = _sampleWidth;
        r = CGRectMake(0, (_sampleHeight - _sampleWidth) / 2, n, n);
    } else {
        n = _sampleHeight;
        r = CGRectMake((_sampleWidth - _sampleHeight) / 2, 0, n, n);
    }
    CGImageRef toImage = CGImageCreateWithImageInRect(fromImage, r);
    // Convert back into a CMSampleBuffer
    CVPixelBufferRef pxbuffer = [self CVPixelBufferRefFromUiImage:toImage];
    CMSampleTimingInfo sampleTime = {
        .duration = CMSampleBufferGetDuration(sampleBuffer),
        .presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer),
        .decodeTimeStamp = CMSampleBufferGetDecodeTimeStamp(sampleBuffer)
    };
    CMVideoFormatDescriptionRef videoInfo = NULL;
    CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pxbuffer, &videoInfo);
    OSStatus err = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pxbuffer, true, NULL, NULL, videoInfo, &sampleTime, &sampleBuffer);
    if (err)
        NSLog(@"Failure: cutPixelBuffer");
    // Release data if needed
    CGImageRelease(fromImage);
    CGImageRelease(toImage);
    return pxbuffer;
}
- (void)clearTempFile {
    [[NSFileManager defaultManager] removeItemAtPath:outputFileName error:nil];
    for (int i = 0; i < [outputImageFiles count]; i++) {
        [[NSFileManager defaultManager] removeItemAtPath:[outputImageFiles objectAtIndex:i] error:nil];
    }
}
@end