GPUImageMovieWriter.m

#import "GPUImageMovieWriter.h"

#import "GPUImageContext.h"
#import "GLProgram.h"
#import "GPUImageFilter.h"

static BOOL allowWriteAudio = NO;
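
// Fragment shader used when fast texture upload is unavailable: it swaps the
// red and blue channels so that glReadPixels' RGBA output lines up with the
// BGRA pixel format expected by the asset writer's pixel buffer input.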
NSString *const kGPUImageColorSwizzlingFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 
 uniform sampler2D inputImageTexture;
 
 void main()
 {
     gl_FragColor = texture2D(inputImageTexture, textureCoordinate).bgra;
 }
);

@interface GPUImageMovieWriter ()
{
    GLuint movieFramebuffer, movieRenderbuffer;
    
    GLProgram *colorSwizzlingProgram;
    GLint colorSwizzlingPositionAttribute, colorSwizzlingTextureCoordinateAttribute;
    GLint colorSwizzlingInputTextureUniform;
    
    GPUImageFramebuffer *firstInputFramebuffer;
    
    CMTime startTime, previousFrameTime, previousAudioTime;
    
    dispatch_queue_t audioQueue, videoQueue;
    BOOL audioEncodingIsFinished, videoEncodingIsFinished;
    
    BOOL isRecording;
}

// Movie recording
- (void)initializeMovieWithOutputSettings:(NSDictionary *)outputSettings;

// Frame rendering
- (void)createDataFBO;
- (void)destroyDataFBO;
- (void)setFilterFBO;
- (void)renderAtInternalSizeUsingFramebuffer:(GPUImageFramebuffer *)inputFramebufferToUse;

@end

@implementation GPUImageMovieWriter

@synthesize hasAudioTrack = _hasAudioTrack;
@synthesize encodingLiveVideo = _encodingLiveVideo;
@synthesize shouldPassthroughAudio = _shouldPassthroughAudio;
@synthesize completionBlock;
@synthesize failureBlock;
@synthesize videoInputReadyCallback;
@synthesize audioInputReadyCallback;
@synthesize enabled;
@synthesize shouldInvalidateAudioSampleWhenDone = _shouldInvalidateAudioSampleWhenDone;
@synthesize paused = _paused;
@synthesize movieWriterContext = _movieWriterContext;
@synthesize delegate = _delegate;

#pragma mark -
#pragma mark Initialization and teardown
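
// Typical usage (a sketch, not part of this file; assumes a configured
// GPUImage source and a filter named "filter" already targeting it):
//
//   GPUImageMovieWriter *movieWriter =
//       [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL
//                                                size:CGSizeMake(480.0, 640.0)];
//   [filter addTarget:movieWriter];
//   [movieWriter startRecording];
//   // ... later ...
//   [filter removeTarget:movieWriter];
//   [movieWriter finishRecordingWithCompletionHandler:^{ /* movie saved */ }];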

- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize;
{
    return [self initWithMovieURL:newMovieURL size:newSize fileType:AVFileTypeQuickTimeMovie outputSettings:nil];
}
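
// The designated initializer creates a dedicated GPUImageContext for the
// writer that shares its OpenGL ES sharegroup with the main processing
// context, so framebuffers rendered by upstream filters remain accessible here.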

- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize fileType:(NSString *)newFileType outputSettings:(NSMutableDictionary *)outputSettings;
{
    if (!(self = [super init]))
    {
        return nil;
    }

    _shouldInvalidateAudioSampleWhenDone = NO;
    
    self.enabled = YES;
    alreadyFinishedRecording = NO;
    videoEncodingIsFinished = NO;
    audioEncodingIsFinished = NO;

    videoSize = newSize;
    movieURL = newMovieURL;
    fileType = newFileType;
    startTime = kCMTimeInvalid;
    _encodingLiveVideo = [[outputSettings objectForKey:@"EncodingLiveVideo"] isKindOfClass:[NSNumber class]] ? [[outputSettings objectForKey:@"EncodingLiveVideo"] boolValue] : YES;
    previousFrameTime = kCMTimeNegativeInfinity;
    previousAudioTime = kCMTimeNegativeInfinity;
    inputRotation = kGPUImageNoRotation;
    
    _movieWriterContext = [[GPUImageContext alloc] init];
    [_movieWriterContext useSharegroup:[[[GPUImageContext sharedImageProcessingContext] context] sharegroup]];

    runSynchronouslyOnContextQueue(_movieWriterContext, ^{
        [_movieWriterContext useAsCurrentContext];
        
        if ([GPUImageContext supportsFastTextureUpload])
        {
            colorSwizzlingProgram = [_movieWriterContext programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImagePassthroughFragmentShaderString];
        }
        else
        {
            colorSwizzlingProgram = [_movieWriterContext programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageColorSwizzlingFragmentShaderString];
        }
        
        if (!colorSwizzlingProgram.initialized)
        {
            [colorSwizzlingProgram addAttribute:@"position"];
            [colorSwizzlingProgram addAttribute:@"inputTextureCoordinate"];
            
            if (![colorSwizzlingProgram link])
            {
                NSString *progLog = [colorSwizzlingProgram programLog];
                NSLog(@"Program link log: %@", progLog);
                NSString *fragLog = [colorSwizzlingProgram fragmentShaderLog];
                NSLog(@"Fragment shader compile log: %@", fragLog);
                NSString *vertLog = [colorSwizzlingProgram vertexShaderLog];
                NSLog(@"Vertex shader compile log: %@", vertLog);
                colorSwizzlingProgram = nil;
                NSAssert(NO, @"Filter shader link failed");
            }
        }
        
        colorSwizzlingPositionAttribute = [colorSwizzlingProgram attributeIndex:@"position"];
        colorSwizzlingTextureCoordinateAttribute = [colorSwizzlingProgram attributeIndex:@"inputTextureCoordinate"];
        colorSwizzlingInputTextureUniform = [colorSwizzlingProgram uniformIndex:@"inputImageTexture"];
        
        [_movieWriterContext setContextShaderProgram:colorSwizzlingProgram];
        
        glEnableVertexAttribArray(colorSwizzlingPositionAttribute);
        glEnableVertexAttribArray(colorSwizzlingTextureCoordinateAttribute);
    });
    
    [self initializeMovieWithOutputSettings:outputSettings];

    return self;
}

- (void)dealloc;
{
    [self destroyDataFBO];

#if !OS_OBJECT_USE_OBJC
    if (audioQueue != NULL)
    {
        dispatch_release(audioQueue);
    }
    if (videoQueue != NULL)
    {
        dispatch_release(videoQueue);
    }
#endif
}

#pragma mark -
#pragma mark Movie recording

- (void)initializeMovieWithOutputSettings:(NSDictionary *)outputSettings;
{
    isRecording = NO;
    
    self.enabled = YES;
    NSError *error = nil;
    assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL fileType:fileType error:&error];
    if (error != nil)
    {
        NSLog(@"Error: %@", error);
        if (failureBlock)
        {
            failureBlock(error);
        }
        else
        {
            if (self.delegate && [self.delegate respondsToSelector:@selector(movieRecordingFailedWithError:)])
            {
                [self.delegate movieRecordingFailedWithError:error];
            }
        }
    }
    
    // Set this to make sure that a functional movie is produced, even if the recording is cut off mid-stream. Only the last second should be lost in that case.
    assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1.0, 1000);
    
    // use default output settings if none specified
    if (outputSettings == nil)
    {
        NSMutableDictionary *settings = [[NSMutableDictionary alloc] init];
        [settings setObject:AVVideoCodecH264 forKey:AVVideoCodecKey];
        [settings setObject:[NSNumber numberWithInt:videoSize.width] forKey:AVVideoWidthKey];
        [settings setObject:[NSNumber numberWithInt:videoSize.height] forKey:AVVideoHeightKey];
        outputSettings = settings;
    }
    // custom output settings specified
    else
    {
        NSString *videoCodec = [outputSettings objectForKey:AVVideoCodecKey];
        NSNumber *width = [outputSettings objectForKey:AVVideoWidthKey];
        NSNumber *height = [outputSettings objectForKey:AVVideoHeightKey];
        
        NSAssert(videoCodec && width && height, @"OutputSettings is missing required parameters.");
        
        if ([outputSettings objectForKey:@"EncodingLiveVideo"]) {
            NSMutableDictionary *tmp = [outputSettings mutableCopy];
            [tmp removeObjectForKey:@"EncodingLiveVideo"];
            outputSettings = tmp;
        }
    }
    
    /*
    NSDictionary *videoCleanApertureSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                                [NSNumber numberWithInt:videoSize.width], AVVideoCleanApertureWidthKey,
                                                [NSNumber numberWithInt:videoSize.height], AVVideoCleanApertureHeightKey,
                                                [NSNumber numberWithInt:0], AVVideoCleanApertureHorizontalOffsetKey,
                                                [NSNumber numberWithInt:0], AVVideoCleanApertureVerticalOffsetKey,
                                                nil];

    NSDictionary *videoAspectRatioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                              [NSNumber numberWithInt:3], AVVideoPixelAspectRatioHorizontalSpacingKey,
                                              [NSNumber numberWithInt:3], AVVideoPixelAspectRatioVerticalSpacingKey,
                                              nil];

    NSMutableDictionary *compressionProperties = [[NSMutableDictionary alloc] init];
    [compressionProperties setObject:videoCleanApertureSettings forKey:AVVideoCleanApertureKey];
    [compressionProperties setObject:videoAspectRatioSettings forKey:AVVideoPixelAspectRatioKey];
    [compressionProperties setObject:[NSNumber numberWithInt:2000000] forKey:AVVideoAverageBitRateKey];
    [compressionProperties setObject:[NSNumber numberWithInt:16] forKey:AVVideoMaxKeyFrameIntervalKey];
    [compressionProperties setObject:AVVideoProfileLevelH264Main31 forKey:AVVideoProfileLevelKey];
    
    [outputSettings setObject:compressionProperties forKey:AVVideoCompressionPropertiesKey];
    */
    
    assetWriterVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];
    assetWriterVideoInput.expectsMediaDataInRealTime = _encodingLiveVideo;
    
    // You need to use BGRA for the video in order to get realtime encoding. I use a color-swizzling shader to line up glReadPixels' normal RGBA output with the movie input's BGRA.
    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                           [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                                           [NSNumber numberWithInt:videoSize.width], kCVPixelBufferWidthKey,
                                                           [NSNumber numberWithInt:videoSize.height], kCVPixelBufferHeightKey,
                                                           nil];
//    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
//                                                           [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey,
//                                                           nil];
    
    assetWriterPixelBufferInput = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:assetWriterVideoInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
    
    [assetWriter addInput:assetWriterVideoInput];
}

- (void)setEncodingLiveVideo:(BOOL)value
{
    _encodingLiveVideo = value;
    if (isRecording) {
        NSAssert(NO, @"Cannot change encodingLiveVideo while recording.");
    }
    else
    {
        assetWriterVideoInput.expectsMediaDataInRealTime = _encodingLiveVideo;
        assetWriterAudioInput.expectsMediaDataInRealTime = _encodingLiveVideo;
    }
}
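
// When an audioInputReadyCallback is set, -[AVAssetWriter startWriting] is
// deferred to -enableSynchronizationCallbacks; otherwise the writer starts
// immediately. Either way, the session's source time is only set when the
// first video frame or audio sample arrives.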

- (void)startRecording;
{
    alreadyFinishedRecording = NO;
    startTime = kCMTimeInvalid;
    runSynchronouslyOnContextQueue(_movieWriterContext, ^{
        if (audioInputReadyCallback == NULL)
        {
            [assetWriter startWriting];
        }
    });
    isRecording = YES;
    //    [assetWriter startSessionAtSourceTime:kCMTimeZero];
    allowWriteAudio = NO;
}

- (void)startRecordingInOrientation:(CGAffineTransform)orientationTransform;
{
    assetWriterVideoInput.transform = orientationTransform;

    [self startRecording];
}

- (void)cancelRecording;
{
    if (assetWriter.status == AVAssetWriterStatusCompleted)
    {
        return;
    }
    
    isRecording = NO;
    runSynchronouslyOnContextQueue(_movieWriterContext, ^{
        alreadyFinishedRecording = YES;

        if (assetWriter.status == AVAssetWriterStatusWriting && !videoEncodingIsFinished)
        {
            videoEncodingIsFinished = YES;
            [assetWriterVideoInput markAsFinished];
        }
        if (assetWriter.status == AVAssetWriterStatusWriting && !audioEncodingIsFinished)
        {
            audioEncodingIsFinished = YES;
            [assetWriterAudioInput markAsFinished];
        }
        [assetWriter cancelWriting];
    });
}

- (void)finishRecording;
{
    [self finishRecordingWithCompletionHandler:NULL];
}

- (void)finishRecordingWithCompletionHandler:(void (^)(void))handler;
{
    runSynchronouslyOnContextQueue(_movieWriterContext, ^{
        isRecording = NO;
        
        if (assetWriter.status == AVAssetWriterStatusCompleted || assetWriter.status == AVAssetWriterStatusCancelled || assetWriter.status == AVAssetWriterStatusUnknown)
        {
            if (handler)
                runAsynchronouslyOnContextQueue(_movieWriterContext, handler);
            return;
        }
        if (assetWriter.status == AVAssetWriterStatusWriting && !videoEncodingIsFinished)
        {
            videoEncodingIsFinished = YES;
            [assetWriterVideoInput markAsFinished];
        }
        if (assetWriter.status == AVAssetWriterStatusWriting && !audioEncodingIsFinished)
        {
            audioEncodingIsFinished = YES;
            [assetWriterAudioInput markAsFinished];
        }
#if (!defined(__IPHONE_6_0) || (__IPHONE_OS_VERSION_MAX_ALLOWED < __IPHONE_6_0))
        // Not iOS 6 SDK
        [assetWriter finishWriting];
        if (handler)
            runAsynchronouslyOnContextQueue(_movieWriterContext, handler);
#else
        // iOS 6 SDK
        if ([assetWriter respondsToSelector:@selector(finishWritingWithCompletionHandler:)]) {
            // Running iOS 6
            [assetWriter finishWritingWithCompletionHandler:(handler ?: ^{ })];
        }
        else {
            // Not running iOS 6
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
            [assetWriter finishWriting];
#pragma clang diagnostic pop
            if (handler)
                runAsynchronouslyOnContextQueue(_movieWriterContext, handler);
        }
#endif
    });
}
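
// Audio samples are only written once the first video frame has been appended
// (see allowWriteAudio below). When encoding live video, samples that arrive
// while the audio input is busy are dropped rather than blocking the capture
// queue; in offline mode, the writer spins the run loop until the input is ready.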

- (void)processAudioBuffer:(CMSampleBufferRef)audioBuffer;
{
    if (!allowWriteAudio) {
        return;
    }

    if (!isRecording)
    {
        return;
    }
    
//    if (_hasAudioTrack && CMTIME_IS_VALID(startTime))
    if (_hasAudioTrack)
    {
        CFRetain(audioBuffer);

        CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(audioBuffer);
        
        if (CMTIME_IS_INVALID(startTime))
        {
            runSynchronouslyOnContextQueue(_movieWriterContext, ^{
                if ((audioInputReadyCallback == NULL) && (assetWriter.status != AVAssetWriterStatusWriting))
                {
                    [assetWriter startWriting];
                }
                [assetWriter startSessionAtSourceTime:currentSampleTime];
                startTime = currentSampleTime;
            });
        }

        if (!assetWriterAudioInput.readyForMoreMediaData && _encodingLiveVideo)
        {
            NSLog(@"1: Had to drop an audio frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime)));
            if (_shouldInvalidateAudioSampleWhenDone)
            {
                CMSampleBufferInvalidate(audioBuffer);
            }
            CFRelease(audioBuffer);
            return;
        }

        previousAudioTime = currentSampleTime;
        
        // If the consumer wants to do something with the audio samples before writing, let them.
        if (self.audioProcessingCallback) {
            // Need to introspect into the opaque CMBlockBuffer structure to find its raw sample buffers.
            CMBlockBufferRef buffer = CMSampleBufferGetDataBuffer(audioBuffer);
            CMItemCount numSamplesInBuffer = CMSampleBufferGetNumSamples(audioBuffer);
            AudioBufferList audioBufferList;
            
            CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(audioBuffer,
                                                                    NULL,
                                                                    &audioBufferList,
                                                                    sizeof(audioBufferList),
                                                                    NULL,
                                                                    NULL,
                                                                    kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,
                                                                    &buffer
                                                                    );
            
            // This passes a live pointer to the audio buffers; process them in place, or there may be syncing issues.
            for (int bufferCount = 0; bufferCount < audioBufferList.mNumberBuffers; bufferCount++) {
                SInt16 *samples = (SInt16 *)audioBufferList.mBuffers[bufferCount].mData;
                self.audioProcessingCallback(&samples, numSamplesInBuffer);
            }
        }
        
//        NSLog(@"Recorded audio sample time: %lld, %d, %lld", currentSampleTime.value, currentSampleTime.timescale, currentSampleTime.epoch);
        void(^write)() = ^() {
            while (!assetWriterAudioInput.readyForMoreMediaData && !_encodingLiveVideo && !audioEncodingIsFinished) {
                NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.5];
                //NSLog(@"audio waiting...");
                [[NSRunLoop currentRunLoop] runUntilDate:maxDate];
            }
            if (!assetWriterAudioInput.readyForMoreMediaData)
            {
                NSLog(@"2: Had to drop an audio frame %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime)));
            }
            else if (assetWriter.status == AVAssetWriterStatusWriting)
            {
                if (![assetWriterAudioInput appendSampleBuffer:audioBuffer])
                    NSLog(@"Problem appending audio buffer at time: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime)));
            }
            else
            {
                //NSLog(@"Wrote an audio frame %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime)));
            }

            if (_shouldInvalidateAudioSampleWhenDone)
            {
                CMSampleBufferInvalidate(audioBuffer);
            }
            CFRelease(audioBuffer);
        };
//        runAsynchronouslyOnContextQueue(_movieWriterContext, write);
        if (_encodingLiveVideo)
        {
            runAsynchronouslyOnContextQueue(_movieWriterContext, write);
        }
        else
        {
            write();
        }
    }
}
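
// Offline (non-live) encoding is pull-driven: each asset writer input requests
// media through requestMediaDataWhenReadyOnQueue:usingBlock:, and the
// videoInputReadyCallback / audioInputReadyCallback blocks supply frames until
// they report that no more data is available.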

- (void)enableSynchronizationCallbacks;
{
    if (videoInputReadyCallback != NULL)
    {
        if (assetWriter.status != AVAssetWriterStatusWriting)
        {
            [assetWriter startWriting];
        }
        videoQueue = dispatch_queue_create("com.sunsetlakesoftware.GPUImage.videoReadingQueue", NULL);
        [assetWriterVideoInput requestMediaDataWhenReadyOnQueue:videoQueue usingBlock:^{
            if (_paused)
            {
                //NSLog(@"video requestMediaDataWhenReadyOnQueue paused");
                // if we don't sleep, we'll get called back almost immediately, chewing up CPU
                usleep(10000);
                return;
            }
            //NSLog(@"video requestMediaDataWhenReadyOnQueue begin");
            while (assetWriterVideoInput.readyForMoreMediaData && !_paused)
            {
                if (videoInputReadyCallback && !videoInputReadyCallback() && !videoEncodingIsFinished)
                {
                    runAsynchronouslyOnContextQueue(_movieWriterContext, ^{
                        if (assetWriter.status == AVAssetWriterStatusWriting && !videoEncodingIsFinished)
                        {
                            videoEncodingIsFinished = YES;
                            [assetWriterVideoInput markAsFinished];
                        }
                    });
                }
            }
            //NSLog(@"video requestMediaDataWhenReadyOnQueue end");
        }];
    }
    
    if (audioInputReadyCallback != NULL)
    {
        audioQueue = dispatch_queue_create("com.sunsetlakesoftware.GPUImage.audioReadingQueue", NULL);
        [assetWriterAudioInput requestMediaDataWhenReadyOnQueue:audioQueue usingBlock:^{
            if (_paused)
            {
                //NSLog(@"audio requestMediaDataWhenReadyOnQueue paused");
                // if we don't sleep, we'll get called back almost immediately, chewing up CPU
                usleep(10000);
                return;
            }
            //NSLog(@"audio requestMediaDataWhenReadyOnQueue begin");
            while (assetWriterAudioInput.readyForMoreMediaData && !_paused)
            {
                if (audioInputReadyCallback && !audioInputReadyCallback() && !audioEncodingIsFinished)
                {
                    runAsynchronouslyOnContextQueue(_movieWriterContext, ^{
                        if (assetWriter.status == AVAssetWriterStatusWriting && !audioEncodingIsFinished)
                        {
                            audioEncodingIsFinished = YES;
                            [assetWriterAudioInput markAsFinished];
                        }
                    });
                }
            }
            //NSLog(@"audio requestMediaDataWhenReadyOnQueue end");
        }];
    }
}

#pragma mark -
#pragma mark Frame rendering
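
// With fast texture upload, the render target is a CVPixelBuffer wrapped as an
// OpenGL ES texture through the Core Video texture cache, so rendered frames
// land directly in memory the asset writer can consume; otherwise a plain
// renderbuffer is used and each frame is copied out with glReadPixels.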

- (void)createDataFBO;
{
    glActiveTexture(GL_TEXTURE1);
    glGenFramebuffers(1, &movieFramebuffer);
    glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer);
    
    if ([GPUImageContext supportsFastTextureUpload])
    {
        // Code originally sourced from http://allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/
        
        CVPixelBufferPoolCreatePixelBuffer(NULL, [assetWriterPixelBufferInput pixelBufferPool], &renderTarget);
        
        /* AVAssetWriter will use the BT.601 conversion matrix for RGB to YCbCr conversion
         * regardless of the kCVImageBufferYCbCrMatrixKey value.
         * Tagging the resulting video file as BT.601 is the best option right now.
         * Creating a proper BT.709 video is not possible at the moment.
         */
        CVBufferSetAttachment(renderTarget, kCVImageBufferColorPrimariesKey, kCVImageBufferColorPrimaries_ITU_R_709_2, kCVAttachmentMode_ShouldPropagate);
        CVBufferSetAttachment(renderTarget, kCVImageBufferYCbCrMatrixKey, kCVImageBufferYCbCrMatrix_ITU_R_601_4, kCVAttachmentMode_ShouldPropagate);
        CVBufferSetAttachment(renderTarget, kCVImageBufferTransferFunctionKey, kCVImageBufferTransferFunction_ITU_R_709_2, kCVAttachmentMode_ShouldPropagate);
        
        CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [_movieWriterContext coreVideoTextureCache], renderTarget,
                                                     NULL, // texture attributes
                                                     GL_TEXTURE_2D,
                                                     GL_RGBA, // opengl format
                                                     (int)videoSize.width,
                                                     (int)videoSize.height,
                                                     GL_BGRA, // native iOS format
                                                     GL_UNSIGNED_BYTE,
                                                     0,
                                                     &renderTexture);
        
        glBindTexture(CVOpenGLESTextureGetTarget(renderTexture), CVOpenGLESTextureGetName(renderTexture));
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        
        glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(renderTexture), 0);
    }
    else
    {
        glGenRenderbuffers(1, &movieRenderbuffer);
        glBindRenderbuffer(GL_RENDERBUFFER, movieRenderbuffer);
        glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA8_OES, (int)videoSize.width, (int)videoSize.height);
        glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, movieRenderbuffer);
    }
    
    GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
    NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status);
}

- (void)destroyDataFBO;
{
    runSynchronouslyOnContextQueue(_movieWriterContext, ^{
        [_movieWriterContext useAsCurrentContext];

        if (movieFramebuffer)
        {
            glDeleteFramebuffers(1, &movieFramebuffer);
            movieFramebuffer = 0;
        }
        
        if (movieRenderbuffer)
        {
            glDeleteRenderbuffers(1, &movieRenderbuffer);
            movieRenderbuffer = 0;
        }
        
        if ([GPUImageContext supportsFastTextureUpload])
        {
            if (renderTexture)
            {
                CFRelease(renderTexture);
            }
            if (renderTarget)
            {
                CVPixelBufferRelease(renderTarget);
            }
        }
    });
}

- (void)setFilterFBO;
{
    if (!movieFramebuffer)
    {
        [self createDataFBO];
    }
    
    glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer);
    
    glViewport(0, 0, (int)videoSize.width, (int)videoSize.height);
}

- (void)renderAtInternalSizeUsingFramebuffer:(GPUImageFramebuffer *)inputFramebufferToUse;
{
    [_movieWriterContext useAsCurrentContext];
    [self setFilterFBO];
    
    [_movieWriterContext setContextShaderProgram:colorSwizzlingProgram];
    
    glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    
    // This needs to be flipped to write out to video correctly
    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
         1.0f, -1.0f,
        -1.0f,  1.0f,
         1.0f,  1.0f,
    };
    
    const GLfloat *textureCoordinates = [GPUImageFilter textureCoordinatesForRotation:inputRotation];
    
    glActiveTexture(GL_TEXTURE4);
    glBindTexture(GL_TEXTURE_2D, [inputFramebufferToUse texture]);
    glUniform1i(colorSwizzlingInputTextureUniform, 4);
    
    // NSLog(@"Movie writer framebuffer: %@", inputFramebufferToUse);
    
    glVertexAttribPointer(colorSwizzlingPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
    glVertexAttribPointer(colorSwizzlingTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
    
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    
    glFinish();
}

#pragma mark -
#pragma mark GPUImageInput protocol
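
// Each incoming frame is re-rendered at the movie's size (swizzling to BGRA
// where needed), pulled into a CVPixelBuffer, and appended to the writer.
// Frames that arrive while the video input is not ready are dropped when
// encoding live video.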

- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
    if (!isRecording)
    {
        [firstInputFramebuffer unlock];
        return;
    }

    // Drop frames forced by images and other things with no time constants
    // Also, if two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case
    if ((CMTIME_IS_INVALID(frameTime)) || (CMTIME_COMPARE_INLINE(frameTime, ==, previousFrameTime)) || (CMTIME_IS_INDEFINITE(frameTime)))
    {
        [firstInputFramebuffer unlock];
        return;
    }

    if (CMTIME_IS_INVALID(startTime))
    {
        runSynchronouslyOnContextQueue(_movieWriterContext, ^{
            if ((videoInputReadyCallback == NULL) && (assetWriter.status != AVAssetWriterStatusWriting))
            {
                [assetWriter startWriting];
            }
            
            [assetWriter startSessionAtSourceTime:frameTime];
            startTime = frameTime;
        });
    }

    GPUImageFramebuffer *inputFramebufferForBlock = firstInputFramebuffer;
    glFinish();

    runAsynchronouslyOnContextQueue(_movieWriterContext, ^{
        if (!assetWriterVideoInput.readyForMoreMediaData && _encodingLiveVideo)
        {
            [inputFramebufferForBlock unlock];
            NSLog(@"1: Had to drop a video frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, frameTime)));
            return;
        }
        
        // Render the frame with swizzled colors, so that they can be uploaded quickly as BGRA frames
        [_movieWriterContext useAsCurrentContext];
        [self renderAtInternalSizeUsingFramebuffer:inputFramebufferForBlock];
        
        CVPixelBufferRef pixel_buffer = NULL;
        
        if ([GPUImageContext supportsFastTextureUpload])
        {
            pixel_buffer = renderTarget;
            CVPixelBufferLockBaseAddress(pixel_buffer, 0);
        }
        else
        {
            CVReturn status = CVPixelBufferPoolCreatePixelBuffer(NULL, [assetWriterPixelBufferInput pixelBufferPool], &pixel_buffer);
            if ((pixel_buffer == NULL) || (status != kCVReturnSuccess))
            {
                CVPixelBufferRelease(pixel_buffer);
                return;
            }
            else
            {
                CVPixelBufferLockBaseAddress(pixel_buffer, 0);
                
                GLubyte *pixelBufferData = (GLubyte *)CVPixelBufferGetBaseAddress(pixel_buffer);
                glReadPixels(0, 0, videoSize.width, videoSize.height, GL_RGBA, GL_UNSIGNED_BYTE, pixelBufferData);
            }
        }
        
        void(^write)() = ^() {
            while (!assetWriterVideoInput.readyForMoreMediaData && !_encodingLiveVideo && !videoEncodingIsFinished) {
                NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
                // NSLog(@"video waiting...");
                [[NSRunLoop currentRunLoop] runUntilDate:maxDate];
            }
            if (!assetWriterVideoInput.readyForMoreMediaData)
            {
                NSLog(@"2: Had to drop a video frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, frameTime)));
            }
            else if (self.assetWriter.status == AVAssetWriterStatusWriting)
            {
                if (![assetWriterPixelBufferInput appendPixelBuffer:pixel_buffer withPresentationTime:frameTime])
                    NSLog(@"Problem appending pixel buffer at time: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, frameTime)));
                
                allowWriteAudio = YES;
            }
            else
            {
                NSLog(@"Couldn't write a frame");
                //NSLog(@"Wrote a video frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, frameTime)));
            }
            CVPixelBufferUnlockBaseAddress(pixel_buffer, 0);
            
            previousFrameTime = frameTime;
            
            if (![GPUImageContext supportsFastTextureUpload])
            {
                CVPixelBufferRelease(pixel_buffer);
            }
        };
        
        write();
        
        [inputFramebufferForBlock unlock];
    });
}

- (NSInteger)nextAvailableTextureIndex;
{
    return 0;
}

- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
{
    [newInputFramebuffer lock];
//    runSynchronouslyOnContextQueue(_movieWriterContext, ^{
        firstInputFramebuffer = newInputFramebuffer;
//    });
}

- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
    inputRotation = newInputRotation;
}

- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
}

- (CGSize)maximumOutputSize;
{
    return videoSize;
}

- (void)endProcessing
{
    if (completionBlock)
    {
        if (!alreadyFinishedRecording)
        {
            alreadyFinishedRecording = YES;
            completionBlock();
        }
    }
    else
    {
        if (_delegate && [_delegate respondsToSelector:@selector(movieRecordingCompleted)])
        {
            [_delegate movieRecordingCompleted];
        }
    }
}

- (BOOL)shouldIgnoreUpdatesToThisTarget;
{
    return NO;
}

- (BOOL)wantsMonochromeInput;
{
    return NO;
}

- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;
{
}

#pragma mark -
#pragma mark Accessors

- (void)setHasAudioTrack:(BOOL)newValue
{
    [self setHasAudioTrack:newValue audioSettings:nil];
}
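
// When no audio settings are supplied (and passthrough is off), a mono AAC
// track at the hardware sample rate and 64 kbps is configured by default.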

- (void)setHasAudioTrack:(BOOL)newValue audioSettings:(NSDictionary *)audioOutputSettings;
{
    _hasAudioTrack = newValue;
    
    if (_hasAudioTrack)
    {
        if (_shouldPassthroughAudio)
        {
            // Do not set any settings so audio will be the same as passthrough
            audioOutputSettings = nil;
        }
        else if (audioOutputSettings == nil)
        {
            AVAudioSession *sharedAudioSession = [AVAudioSession sharedInstance];
            double preferredHardwareSampleRate;
            
            if ([sharedAudioSession respondsToSelector:@selector(sampleRate)])
            {
                preferredHardwareSampleRate = [sharedAudioSession sampleRate];
            }
            else
            {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
                preferredHardwareSampleRate = [[AVAudioSession sharedInstance] currentHardwareSampleRate];
#pragma clang diagnostic pop
            }
            
            AudioChannelLayout acl;
            bzero(&acl, sizeof(acl));
            acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
            
            audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                                   [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                                   [NSNumber numberWithFloat:preferredHardwareSampleRate], AVSampleRateKey,
                                   [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                                   //[NSNumber numberWithInt:AVAudioQualityLow], AVEncoderAudioQualityKey,
                                   [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                                   nil];
            /*
            AudioChannelLayout acl;
            bzero(&acl, sizeof(acl));
            acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
            
            audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                                   [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                                   [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                                   [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                                   [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                                   nil];
            */
        }
        
        assetWriterAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
        [assetWriter addInput:assetWriterAudioInput];
        assetWriterAudioInput.expectsMediaDataInRealTime = _encodingLiveVideo;
    }
    else
    {
        // Remove audio track if it exists
    }
}

- (NSArray *)metaData {
    return assetWriter.metadata;
}

- (void)setMetaData:(NSArray *)metaData {
    assetWriter.metadata = metaData;
}
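
// Duration is measured from the first written sample to the most recent video
// frame or, failing that, the most recent audio sample.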

- (CMTime)duration {
    if (!CMTIME_IS_VALID(startTime))
        return kCMTimeZero;
    if (!CMTIME_IS_NEGATIVE_INFINITY(previousFrameTime))
        return CMTimeSubtract(previousFrameTime, startTime);
    if (!CMTIME_IS_NEGATIVE_INFINITY(previousAudioTime))
        return CMTimeSubtract(previousAudioTime, startTime);
    return kCMTimeZero;
}

- (CGAffineTransform)transform {
    return assetWriterVideoInput.transform;
}

- (void)setTransform:(CGAffineTransform)transform {
    assetWriterVideoInput.transform = transform;
}

- (AVAssetWriter *)assetWriter {
    return assetWriter;
}

@end