ImageWebPCoder.m 32 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861
//
// SDImageWebPCoder.m
// WebpiOS
//
// Created by ponted on 2019/5/16.
// Copyright © 2019 Shenzhen Blood Link Medical Technology Co., Ltd. All rights reserved.
//
  8. #import "ImageWebPCoder.h"
  9. #import "SDImageCoderHelper.h"
  10. #if __has_include(<SDWebImage/NSImage+Compatibility.h>)
  11. #import <SDWebImage/NSImage+Compatibility.h>
  12. #endif
  13. #import "UIImage+Metadata.h"
  14. #import "UIImage+ForceDecode.h"
  15. #import <Accelerate/Accelerate.h>
  16. #import "libwebp.framework/Headers/decode.h"
  17. #import "libwebp.framework/Headers/demux.h"
  18. #import "libwebp.framework/Headers/encode.h"
  19. #import "libwebp.framework/Headers/format_constants.h"
  20. #import "libwebp.framework/Headers/mux_types.h"
  21. #import "libwebp.framework/Headers/mux.h"
  22. #import "libwebp.framework/Headers/types.h"
  23. #ifndef SD_LOCK
  24. #define SD_LOCK(lock) dispatch_semaphore_wait(lock, DISPATCH_TIME_FOREVER);
  25. #endif
  26. #ifndef SD_UNLOCK
  27. #define SD_UNLOCK(lock) dispatch_semaphore_signal(lock);
  28. #endif
  29. @interface SDWebPCoderFrame : NSObject
  30. @property (nonatomic, assign) NSUInteger index; // Frame index (zero based)
  31. @property (nonatomic, assign) NSTimeInterval duration; // Frame duration in seconds
  32. @property (nonatomic, assign) NSUInteger width; // Frame width
  33. @property (nonatomic, assign) NSUInteger height; // Frame height
  34. @property (nonatomic, assign) NSUInteger offsetX; // Frame origin.x in canvas (left-bottom based)
  35. @property (nonatomic, assign) NSUInteger offsetY; // Frame origin.y in canvas (left-bottom based)
  36. @property (nonatomic, assign) BOOL hasAlpha; // Whether frame contains alpha
  37. @property (nonatomic, assign) BOOL isFullSize; // Whether frame size is equal to canvas size
  38. @property (nonatomic, assign) BOOL shouldBlend; // Frame dispose method
  39. @property (nonatomic, assign) BOOL shouldDispose; // Frame blend operation
  40. @property (nonatomic, assign) NSUInteger blendFromIndex; // The nearest previous frame index which blend mode is WEBP_MUX_BLEND
  41. @end
  42. @implementation SDWebPCoderFrame
  43. @end
  44. @implementation ImageWebPCoder {
  45. WebPIDecoder *_idec;
  46. WebPDemuxer *_demux;
  47. NSData *_imageData;
  48. CGFloat _scale;
  49. NSUInteger _loopCount;
  50. NSUInteger _frameCount;
  51. NSArray<SDWebPCoderFrame *> *_frames;
  52. CGContextRef _canvas;
  53. CGColorSpaceRef _colorSpace;
  54. BOOL _hasAnimation;
  55. BOOL _hasAlpha;
  56. BOOL _finished;
  57. CGFloat _canvasWidth;
  58. CGFloat _canvasHeight;
  59. dispatch_semaphore_t _lock;
  60. NSUInteger _currentBlendIndex;
  61. }
  62. - (void)dealloc {
  63. if (_idec) {
  64. WebPIDelete(_idec);
  65. _idec = NULL;
  66. }
  67. if (_demux) {
  68. WebPDemuxDelete(_demux);
  69. _demux = NULL;
  70. }
  71. if (_canvas) {
  72. CGContextRelease(_canvas);
  73. _canvas = NULL;
  74. }
  75. if (_colorSpace) {
  76. CGColorSpaceRelease(_colorSpace);
  77. _colorSpace = NULL;
  78. }
  79. }
  80. + (instancetype)sharedCoder {
  81. static ImageWebPCoder *coder;
  82. static dispatch_once_t onceToken;
  83. dispatch_once(&onceToken, ^{
  84. coder = [[ImageWebPCoder alloc] init];
  85. });
  86. return coder;
  87. }
  88. #pragma mark - Decode
  89. - (BOOL)canDecodeFromData:(nullable NSData *)data {
  90. return ([NSData sd_imageFormatForImageData:data] == SDImageFormatWebP);
  91. }
  92. - (BOOL)canIncrementalDecodeFromData:(NSData *)data {
  93. return ([NSData sd_imageFormatForImageData:data] == SDImageFormatWebP);
  94. }
  95. - (UIImage *)decodedImageWithData:(NSData *)data options:(nullable SDImageCoderOptions *)options {
  96. if (!data) {
  97. return nil;
  98. }
  99. WebPData webpData;
  100. WebPDataInit(&webpData);
  101. webpData.bytes = data.bytes;
  102. webpData.size = data.length;
  103. WebPDemuxer *demuxer = WebPDemux(&webpData);
  104. if (!demuxer) {
  105. return nil;
  106. }
  107. uint32_t flags = WebPDemuxGetI(demuxer, WEBP_FF_FORMAT_FLAGS);
  108. BOOL hasAnimation = flags & ANIMATION_FLAG;
  109. BOOL decodeFirstFrame = [options[SDImageCoderDecodeFirstFrameOnly] boolValue];
  110. CGFloat scale = 1;
  111. NSNumber *scaleFactor = options[SDImageCoderDecodeScaleFactor];
  112. if (scaleFactor != nil) {
  113. scale = [scaleFactor doubleValue];
  114. if (scale < 1) {
  115. scale = 1;
  116. }
  117. }
  118. // for animated webp image
  119. WebPIterator iter;
  120. // libwebp's index start with 1
  121. if (!WebPDemuxGetFrame(demuxer, 1, &iter)) {
  122. WebPDemuxReleaseIterator(&iter);
  123. WebPDemuxDelete(demuxer);
  124. return nil;
  125. }
  126. CGColorSpaceRef colorSpace = [self sd_colorSpaceWithDemuxer:demuxer];
  127. if (!hasAnimation || decodeFirstFrame) {
  128. // first frame for animated webp image
  129. CGImageRef imageRef = [self sd_createWebpImageWithData:iter.fragment colorSpace:colorSpace];
  130. CGColorSpaceRelease(colorSpace);
  131. #if SD_UIKIT || SD_WATCH
  132. UIImage *firstFrameImage = [[UIImage alloc] initWithCGImage:imageRef scale:scale orientation:UIImageOrientationUp];
  133. #else
  134. UIImage *firstFrameImage = [[UIImage alloc] initWithCGImage:imageRef scale:scale orientation:kCGImagePropertyOrientationUp];
  135. #endif
  136. firstFrameImage.sd_imageFormat = SDImageFormatWebP;
  137. CGImageRelease(imageRef);
  138. WebPDemuxReleaseIterator(&iter);
  139. WebPDemuxDelete(demuxer);
  140. return firstFrameImage;
  141. }
  142. int loopCount = WebPDemuxGetI(demuxer, WEBP_FF_LOOP_COUNT);
  143. int canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
  144. int canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
  145. BOOL hasAlpha = flags & ALPHA_FLAG;
  146. CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
  147. bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
  148. CGContextRef canvas = CGBitmapContextCreate(NULL, canvasWidth, canvasHeight, 8, 0, [SDImageCoderHelper colorSpaceGetDeviceRGB], bitmapInfo);
  149. if (!canvas) {
  150. WebPDemuxDelete(demuxer);
  151. CGColorSpaceRelease(colorSpace);
  152. return nil;
  153. }
  154. NSMutableArray<SDImageFrame *> *frames = [NSMutableArray array];
  155. do {
  156. @autoreleasepool {
  157. CGImageRef imageRef = [self sd_drawnWebpImageWithCanvas:canvas iterator:iter colorSpace:colorSpace];
  158. if (!imageRef) {
  159. continue;
  160. }
  161. #if SD_UIKIT || SD_WATCH
  162. UIImage *image = [[UIImage alloc] initWithCGImage:imageRef scale:scale orientation:UIImageOrientationUp];
  163. #else
  164. UIImage *image = [[UIImage alloc] initWithCGImage:imageRef scale:scale orientation:kCGImagePropertyOrientationUp];
  165. #endif
  166. CGImageRelease(imageRef);
  167. NSTimeInterval duration = [self sd_frameDurationWithIterator:iter];
  168. SDImageFrame *frame = [SDImageFrame frameWithImage:image duration:duration];
  169. [frames addObject:frame];
  170. }
  171. } while (WebPDemuxNextFrame(&iter));
  172. WebPDemuxReleaseIterator(&iter);
  173. WebPDemuxDelete(demuxer);
  174. CGContextRelease(canvas);
  175. CGColorSpaceRelease(colorSpace);
  176. UIImage *animatedImage = [SDImageCoderHelper animatedImageWithFrames:frames];
  177. animatedImage.sd_imageLoopCount = loopCount;
  178. animatedImage.sd_imageFormat = SDImageFormatWebP;
  179. return animatedImage;
  180. }
  181. #pragma mark - Progressive Decode
  182. - (instancetype)initIncrementalWithOptions:(nullable SDImageCoderOptions *)options {
  183. self = [super init];
  184. if (self) {
  185. // Progressive images need transparent, so always use premultiplied BGRA
  186. _idec = WebPINewRGB(MODE_bgrA, NULL, 0, 0);
  187. CGFloat scale = 1;
  188. NSNumber *scaleFactor = options[SDImageCoderDecodeScaleFactor];
  189. if (scaleFactor != nil) {
  190. scale = [scaleFactor doubleValue];
  191. if (scale < 1) {
  192. scale = 1;
  193. }
  194. }
  195. _scale = scale;
  196. }
  197. return self;
  198. }
  199. - (void)updateIncrementalData:(NSData *)data finished:(BOOL)finished {
  200. if (_finished) {
  201. return;
  202. }
  203. _imageData = data;
  204. _finished = finished;
  205. VP8StatusCode status = WebPIUpdate(_idec, data.bytes, data.length);
  206. if (status != VP8_STATUS_OK && status != VP8_STATUS_SUSPENDED) {
  207. return;
  208. }
  209. // libwebp current does not support progressive decoding for animated image, so no need to scan and update the frame information
  210. }
  211. - (UIImage *)incrementalDecodedImageWithOptions:(SDImageCoderOptions *)options {
  212. UIImage *image;
  213. int width = 0;
  214. int height = 0;
  215. int last_y = 0;
  216. int stride = 0;
  217. uint8_t *rgba = WebPIDecGetRGB(_idec, &last_y, &width, &height, &stride);
  218. // last_y may be 0, means no enough bitmap data to decode, ignore this
  219. if (width + height > 0 && last_y > 0 && height >= last_y) {
  220. // Construct a UIImage from the decoded RGBA value array
  221. size_t rgbaSize = last_y * stride;
  222. CGDataProviderRef provider =
  223. CGDataProviderCreateWithData(NULL, rgba, rgbaSize, NULL);
  224. CGColorSpaceRef colorSpaceRef = [SDImageCoderHelper colorSpaceGetDeviceRGB];
  225. CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst;
  226. size_t components = 4;
  227. CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
  228. // Why to use last_y for image height is because of libwebp's bug (https://bugs.chromium.org/p/webp/issues/detail?id=362)
  229. // It will not keep memory barrier safe on x86 architechure (macOS & iPhone simulator) but on ARM architecture (iPhone & iPad & tv & watch) it works great
  230. // If different threads use WebPIDecGetRGB to grab rgba bitmap, it will contain the previous decoded bitmap data
  231. // So this will cause our drawed image looks strange(above is the current part but below is the previous part)
  232. // We only grab the last_y height and draw the last_y height instead of total height image
  233. // Besides fix, this can enhance performance since we do not need to create extra bitmap
  234. CGImageRef imageRef = CGImageCreate(width, last_y, 8, components * 8, components * width, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);
  235. CGDataProviderRelease(provider);
  236. if (!imageRef) {
  237. return nil;
  238. }
  239. CGContextRef canvas = CGBitmapContextCreate(NULL, width, height, 8, 0, [SDImageCoderHelper colorSpaceGetDeviceRGB], bitmapInfo);
  240. if (!canvas) {
  241. CGImageRelease(imageRef);
  242. return nil;
  243. }
  244. // Only draw the last_y image height, keep remains transparent, in Core Graphics coordinate system
  245. CGContextDrawImage(canvas, CGRectMake(0, height - last_y, width, last_y), imageRef);
  246. CGImageRef newImageRef = CGBitmapContextCreateImage(canvas);
  247. CGImageRelease(imageRef);
  248. if (!newImageRef) {
  249. CGContextRelease(canvas);
  250. return nil;
  251. }
  252. CGFloat scale = _scale;
  253. NSNumber *scaleFactor = options[SDImageCoderDecodeScaleFactor];
  254. if (scaleFactor != nil) {
  255. scale = [scaleFactor doubleValue];
  256. if (scale < 1) {
  257. scale = 1;
  258. }
  259. }
  260. #if SD_UIKIT || SD_WATCH
  261. image = [[UIImage alloc] initWithCGImage:newImageRef scale:scale orientation:UIImageOrientationUp];
  262. #else
  263. image = [[UIImage alloc] initWithCGImage:newImageRef scale:scale orientation:kCGImagePropertyOrientationUp];
  264. #endif
  265. image.sd_isDecoded = YES; // Already drawn on bitmap context above
  266. image.sd_imageFormat = SDImageFormatWebP;
  267. CGImageRelease(newImageRef);
  268. CGContextRelease(canvas);
  269. }
  270. return image;
  271. }
  272. - (void)sd_blendWebpImageWithCanvas:(CGContextRef)canvas iterator:(WebPIterator)iter colorSpace:(nonnull CGColorSpaceRef)colorSpaceRef {
  273. size_t canvasHeight = CGBitmapContextGetHeight(canvas);
  274. CGFloat tmpX = iter.x_offset;
  275. CGFloat tmpY = canvasHeight - iter.height - iter.y_offset;
  276. CGRect imageRect = CGRectMake(tmpX, tmpY, iter.width, iter.height);
  277. if (iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
  278. CGContextClearRect(canvas, imageRect);
  279. } else {
  280. CGImageRef imageRef = [self sd_createWebpImageWithData:iter.fragment colorSpace:colorSpaceRef];
  281. if (!imageRef) {
  282. return;
  283. }
  284. BOOL shouldBlend = iter.blend_method == WEBP_MUX_BLEND;
  285. // If not blend, cover the target image rect. (firstly clear then draw)
  286. if (!shouldBlend) {
  287. CGContextClearRect(canvas, imageRect);
  288. }
  289. CGContextDrawImage(canvas, imageRect, imageRef);
  290. CGImageRelease(imageRef);
  291. }
  292. }
  293. - (nullable CGImageRef)sd_drawnWebpImageWithCanvas:(CGContextRef)canvas iterator:(WebPIterator)iter colorSpace:(nonnull CGColorSpaceRef)colorSpaceRef CF_RETURNS_RETAINED {
  294. CGImageRef imageRef = [self sd_createWebpImageWithData:iter.fragment colorSpace:colorSpaceRef];
  295. if (!imageRef) {
  296. return nil;
  297. }
  298. size_t canvasHeight = CGBitmapContextGetHeight(canvas);
  299. CGFloat tmpX = iter.x_offset;
  300. CGFloat tmpY = canvasHeight - iter.height - iter.y_offset;
  301. CGRect imageRect = CGRectMake(tmpX, tmpY, iter.width, iter.height);
  302. BOOL shouldBlend = iter.blend_method == WEBP_MUX_BLEND;
  303. // If not blend, cover the target image rect. (firstly clear then draw)
  304. if (!shouldBlend) {
  305. CGContextClearRect(canvas, imageRect);
  306. }
  307. CGContextDrawImage(canvas, imageRect, imageRef);
  308. CGImageRef newImageRef = CGBitmapContextCreateImage(canvas);
  309. CGImageRelease(imageRef);
  310. if (iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
  311. CGContextClearRect(canvas, imageRect);
  312. }
  313. return newImageRef;
  314. }
  315. - (nullable CGImageRef)sd_createWebpImageWithData:(WebPData)webpData colorSpace:(nonnull CGColorSpaceRef)colorSpaceRef CF_RETURNS_RETAINED {
  316. WebPDecoderConfig config;
  317. if (!WebPInitDecoderConfig(&config)) {
  318. return nil;
  319. }
  320. if (WebPGetFeatures(webpData.bytes, webpData.size, &config.input) != VP8_STATUS_OK) {
  321. return nil;
  322. }
  323. BOOL hasAlpha = config.input.has_alpha;
  324. // iOS prefer BGRA8888 (premultiplied) or BGRX8888 bitmapInfo for screen rendering, which is same as `UIGraphicsBeginImageContext()` or `- [CALayer drawInContext:]`
  325. // use this bitmapInfo, combined with right colorspace, even without decode, can still avoid extra CA::Render::copy_image(which marked `Color Copied Images` from Instruments)
  326. CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
  327. bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
  328. config.options.use_threads = 1;
  329. config.output.colorspace = MODE_bgrA;
  330. // Decode the WebP image data into a RGBA value array
  331. if (WebPDecode(webpData.bytes, webpData.size, &config) != VP8_STATUS_OK) {
  332. return nil;
  333. }
  334. int width = config.input.width;
  335. int height = config.input.height;
  336. if (config.options.use_scaling) {
  337. width = config.options.scaled_width;
  338. height = config.options.scaled_height;
  339. }
  340. // Construct a UIImage from the decoded RGBA value array
  341. CGDataProviderRef provider =
  342. CGDataProviderCreateWithData(NULL, config.output.u.RGBA.rgba, config.output.u.RGBA.size, FreeImageData);
  343. size_t bitsPerComponent = 8;
  344. size_t bitsPerPixel = 32;
  345. size_t bytesPerRow = config.output.u.RGBA.stride;
  346. CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
  347. CGImageRef imageRef = CGImageCreate(width, height, bitsPerComponent, bitsPerPixel, bytesPerRow, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);
  348. CGDataProviderRelease(provider);
  349. return imageRef;
  350. }
  351. - (NSTimeInterval)sd_frameDurationWithIterator:(WebPIterator)iter {
  352. int duration = iter.duration;
  353. if (duration <= 10) {
  354. // WebP standard says 0 duration is used for canvas updating but not showing image, but actually Chrome and other implementations set it to 100ms if duration is lower or equal than 10ms
  355. // Some animated WebP images also created without duration, we should keep compatibility
  356. duration = 100;
  357. }
  358. return duration / 1000.0;
  359. }
  360. // Create and return the correct colorspace by checking the ICC Profile
  361. - (nonnull CGColorSpaceRef)sd_colorSpaceWithDemuxer:(nonnull WebPDemuxer *)demuxer CF_RETURNS_RETAINED {
  362. // WebP contains ICC Profile should use the desired colorspace, instead of default device colorspace
  363. // See: https://developers.google.com/speed/webp/docs/riff_container#color_profile
  364. CGColorSpaceRef colorSpaceRef = NULL;
  365. uint32_t flags = WebPDemuxGetI(demuxer, WEBP_FF_FORMAT_FLAGS);
  366. if (flags & ICCP_FLAG) {
  367. WebPChunkIterator chunk_iter;
  368. int result = WebPDemuxGetChunk(demuxer, "ICCP", 1, &chunk_iter);
  369. if (result) {
  370. // See #2618, the `CGColorSpaceCreateWithICCProfile` does not copy ICC Profile data, it only retain `CFDataRef`.
  371. // When the libwebp `WebPDemuxer` dealloc, all chunks will be freed. So we must copy the ICC data (really cheap, less than 10KB)
  372. NSData *profileData = [NSData dataWithBytes:chunk_iter.chunk.bytes length:chunk_iter.chunk.size];
  373. colorSpaceRef = CGColorSpaceCreateWithICCProfile((__bridge CFDataRef)profileData);
  374. WebPDemuxReleaseChunkIterator(&chunk_iter);
  375. if (colorSpaceRef) {
  376. // We use RGB color model to decode WebP images currently, so we must filter out other colorSpace
  377. CGColorSpaceModel model = CGColorSpaceGetModel(colorSpaceRef);
  378. if (model != kCGColorSpaceModelRGB) {
  379. CGColorSpaceRelease(colorSpaceRef);
  380. colorSpaceRef = NULL;
  381. }
  382. }
  383. }
  384. }
  385. if (!colorSpaceRef) {
  386. colorSpaceRef = [SDImageCoderHelper colorSpaceGetDeviceRGB];
  387. CGColorSpaceRetain(colorSpaceRef);
  388. }
  389. return colorSpaceRef;
  390. }
  391. #pragma mark - Encode
  392. - (BOOL)canEncodeToFormat:(SDImageFormat)format {
  393. return (format == SDImageFormatWebP);
  394. }
  395. - (NSData *)encodedDataWithImage:(UIImage *)image format:(SDImageFormat)format options:(nullable SDImageCoderOptions *)options {
  396. if (!image) {
  397. return nil;
  398. }
  399. NSData *data;
  400. double compressionQuality = 1;
  401. if (options[SDImageCoderEncodeCompressionQuality]) {
  402. compressionQuality = [options[SDImageCoderEncodeCompressionQuality] doubleValue];
  403. }
  404. NSArray<SDImageFrame *> *frames = [SDImageCoderHelper framesFromAnimatedImage:image];
  405. BOOL encodeFirstFrame = [options[SDImageCoderEncodeFirstFrameOnly] boolValue];
  406. if (encodeFirstFrame || frames.count == 0) {
  407. // for static single webp image
  408. data = [self sd_encodedWebpDataWithImage:image.CGImage quality:compressionQuality];
  409. } else {
  410. // for animated webp image
  411. WebPMux *mux = WebPMuxNew();
  412. if (!mux) {
  413. return nil;
  414. }
  415. for (size_t i = 0; i < frames.count; i++) {
  416. SDImageFrame *currentFrame = frames[i];
  417. NSData *webpData = [self sd_encodedWebpDataWithImage:currentFrame.image.CGImage quality:compressionQuality];
  418. int duration = currentFrame.duration * 1000;
  419. WebPMuxFrameInfo frame = { .bitstream.bytes = webpData.bytes,
  420. .bitstream.size = webpData.length,
  421. .duration = duration,
  422. .id = WEBP_CHUNK_ANMF,
  423. .dispose_method = WEBP_MUX_DISPOSE_BACKGROUND, // each frame will clear canvas
  424. .blend_method = WEBP_MUX_NO_BLEND
  425. };
  426. if (WebPMuxPushFrame(mux, &frame, 0) != WEBP_MUX_OK) {
  427. WebPMuxDelete(mux);
  428. return nil;
  429. }
  430. }
  431. int loopCount = (int)image.sd_imageLoopCount;
  432. WebPMuxAnimParams params = { .bgcolor = 0,
  433. .loop_count = loopCount
  434. };
  435. if (WebPMuxSetAnimationParams(mux, &params) != WEBP_MUX_OK) {
  436. WebPMuxDelete(mux);
  437. return nil;
  438. }
  439. WebPData outputData;
  440. WebPMuxError error = WebPMuxAssemble(mux, &outputData);
  441. WebPMuxDelete(mux);
  442. if (error != WEBP_MUX_OK) {
  443. return nil;
  444. }
  445. data = [NSData dataWithBytes:outputData.bytes length:outputData.size];
  446. WebPDataClear(&outputData);
  447. }
  448. return data;
  449. }
  450. - (nullable NSData *)sd_encodedWebpDataWithImage:(nullable CGImageRef)imageRef quality:(double)quality {
  451. NSData *webpData;
  452. if (!imageRef) {
  453. return nil;
  454. }
  455. size_t width = CGImageGetWidth(imageRef);
  456. size_t height = CGImageGetHeight(imageRef);
  457. if (width == 0 || width > WEBP_MAX_DIMENSION) {
  458. return nil;
  459. }
  460. if (height == 0 || height > WEBP_MAX_DIMENSION) {
  461. return nil;
  462. }
  463. size_t bytesPerRow = CGImageGetBytesPerRow(imageRef);
  464. CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(imageRef);
  465. CGImageAlphaInfo alphaInfo = bitmapInfo & kCGBitmapAlphaInfoMask;
  466. CGBitmapInfo byteOrderInfo = bitmapInfo & kCGBitmapByteOrderMask;
  467. BOOL hasAlpha = !(alphaInfo == kCGImageAlphaNone ||
  468. alphaInfo == kCGImageAlphaNoneSkipFirst ||
  469. alphaInfo == kCGImageAlphaNoneSkipLast);
  470. BOOL byteOrderNormal = NO;
  471. switch (byteOrderInfo) {
  472. case kCGBitmapByteOrderDefault: {
  473. byteOrderNormal = YES;
  474. } break;
  475. case kCGBitmapByteOrder32Little: {
  476. } break;
  477. case kCGBitmapByteOrder32Big: {
  478. byteOrderNormal = YES;
  479. } break;
  480. default: break;
  481. }
  482. // If we can not get bitmap buffer, early return
  483. CGDataProviderRef dataProvider = CGImageGetDataProvider(imageRef);
  484. if (!dataProvider) {
  485. return nil;
  486. }
  487. CFDataRef dataRef = CGDataProviderCopyData(dataProvider);
  488. if (!dataRef) {
  489. return nil;
  490. }
  491. uint8_t *rgba = NULL;
  492. // We could not assume that input CGImage's color mode is always RGB888/RGBA8888. Convert all other cases to target color mode using vImage
  493. if (byteOrderNormal && ((alphaInfo == kCGImageAlphaNone) || (alphaInfo == kCGImageAlphaLast))) {
  494. // If the input CGImage is already RGB888/RGBA8888
  495. rgba = (uint8_t *)CFDataGetBytePtr(dataRef);
  496. } else {
  497. // Convert all other cases to target color mode using vImage
  498. vImageConverterRef convertor = NULL;
  499. vImage_Error error = kvImageNoError;
  500. vImage_CGImageFormat srcFormat = {
  501. .bitsPerComponent = (uint32_t)CGImageGetBitsPerComponent(imageRef),
  502. .bitsPerPixel = (uint32_t)CGImageGetBitsPerPixel(imageRef),
  503. .colorSpace = CGImageGetColorSpace(imageRef),
  504. .bitmapInfo = bitmapInfo
  505. };
  506. vImage_CGImageFormat destFormat = {
  507. .bitsPerComponent = 8,
  508. .bitsPerPixel = hasAlpha ? 32 : 24,
  509. .colorSpace = [SDImageCoderHelper colorSpaceGetDeviceRGB],
  510. .bitmapInfo = hasAlpha ? kCGImageAlphaLast | kCGBitmapByteOrderDefault : kCGImageAlphaNone | kCGBitmapByteOrderDefault // RGB888/RGBA8888 (Non-premultiplied to works for libwebp)
  511. };
  512. convertor = vImageConverter_CreateWithCGImageFormat(&srcFormat, &destFormat, NULL, kvImageNoFlags, &error);
  513. if (error != kvImageNoError) {
  514. CFRelease(dataRef);
  515. return nil;
  516. }
  517. vImage_Buffer src = {
  518. .data = (uint8_t *)CFDataGetBytePtr(dataRef),
  519. .width = width,
  520. .height = height,
  521. .rowBytes = bytesPerRow
  522. };
  523. vImage_Buffer dest;
  524. error = vImageBuffer_Init(&dest, height, width, destFormat.bitsPerPixel, kvImageNoFlags);
  525. if (error != kvImageNoError) {
  526. vImageConverter_Release(convertor);
  527. CFRelease(dataRef);
  528. return nil;
  529. }
  530. // Convert input color mode to RGB888/RGBA8888
  531. error = vImageConvert_AnyToAny(convertor, &src, &dest, NULL, kvImageNoFlags);
  532. vImageConverter_Release(convertor);
  533. if (error != kvImageNoError) {
  534. CFRelease(dataRef);
  535. return nil;
  536. }
  537. rgba = dest.data; // Converted buffer
  538. bytesPerRow = dest.rowBytes; // Converted bytePerRow
  539. CFRelease(dataRef);
  540. dataRef = NULL;
  541. }
  542. uint8_t *data = NULL; // Output WebP data
  543. float qualityFactor = quality * 100; // WebP quality is 0-100
  544. // Encode RGB888/RGBA8888 buffer to WebP data
  545. size_t size;
  546. if (hasAlpha) {
  547. size = WebPEncodeRGBA(rgba, (int)width, (int)height, (int)bytesPerRow, qualityFactor, &data);
  548. } else {
  549. size = WebPEncodeRGB(rgba, (int)width, (int)height, (int)bytesPerRow, qualityFactor, &data);
  550. }
  551. if (dataRef) {
  552. CFRelease(dataRef); // free non-converted rgba buffer
  553. dataRef = NULL;
  554. } else {
  555. free(rgba); // free converted rgba buffer
  556. rgba = NULL;
  557. }
  558. if (size) {
  559. // success
  560. webpData = [NSData dataWithBytes:data length:size];
  561. }
  562. if (data) {
  563. WebPFree(data);
  564. }
  565. return webpData;
  566. }
  567. static void FreeImageData(void *info, const void *data, size_t size) {
  568. free((void *)data);
  569. }
  570. #pragma mark - SDAnimatedImageCoder
  571. - (instancetype)initWithAnimatedImageData:(NSData *)data options:(nullable SDImageCoderOptions *)options {
  572. if (!data) {
  573. return nil;
  574. }
  575. if (self) {
  576. WebPData webpData;
  577. WebPDataInit(&webpData);
  578. webpData.bytes = data.bytes;
  579. webpData.size = data.length;
  580. WebPDemuxer *demuxer = WebPDemux(&webpData);
  581. if (!demuxer) {
  582. return nil;
  583. }
  584. BOOL framesValid = [self scanAndCheckFramesValidWithDemuxer:demuxer];
  585. if (!framesValid) {
  586. WebPDemuxDelete(demuxer);
  587. return nil;
  588. }
  589. CGFloat scale = 1;
  590. NSNumber *scaleFactor = options[SDImageCoderDecodeScaleFactor];
  591. if (scaleFactor != nil) {
  592. scale = [scaleFactor doubleValue];
  593. if (scale < 1) {
  594. scale = 1;
  595. }
  596. }
  597. _scale = scale;
  598. _demux = demuxer;
  599. _imageData = data;
  600. _currentBlendIndex = NSNotFound;
  601. _lock = dispatch_semaphore_create(1);
  602. }
  603. return self;
  604. }
  605. - (BOOL)scanAndCheckFramesValidWithDemuxer:(WebPDemuxer *)demuxer {
  606. if (!demuxer) {
  607. return NO;
  608. }
  609. WebPIterator iter;
  610. if (!WebPDemuxGetFrame(demuxer, 1, &iter)) {
  611. WebPDemuxReleaseIterator(&iter);
  612. return NO;
  613. }
  614. uint32_t iterIndex = 0;
  615. uint32_t lastBlendIndex = 0;
  616. uint32_t flags = WebPDemuxGetI(demuxer, WEBP_FF_FORMAT_FLAGS);
  617. BOOL hasAnimation = flags & ANIMATION_FLAG;
  618. BOOL hasAlpha = flags & ALPHA_FLAG;
  619. int canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
  620. int canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
  621. uint32_t frameCount = WebPDemuxGetI(demuxer, WEBP_FF_FRAME_COUNT);
  622. uint32_t loopCount = WebPDemuxGetI(demuxer, WEBP_FF_LOOP_COUNT);
  623. NSMutableArray<SDWebPCoderFrame *> *frames = [NSMutableArray array];
  624. // We should loop all the frames and scan each frames' blendFromIndex for later decoding, this can also ensure all frames is valid
  625. do {
  626. SDWebPCoderFrame *frame = [[SDWebPCoderFrame alloc] init];
  627. frame.index = iterIndex;
  628. frame.duration = [self sd_frameDurationWithIterator:iter];
  629. frame.width = iter.width;
  630. frame.height = iter.height;
  631. frame.hasAlpha = iter.has_alpha;
  632. frame.shouldDispose = iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND;
  633. frame.shouldBlend = iter.blend_method == WEBP_MUX_BLEND;
  634. frame.offsetX = iter.x_offset;
  635. frame.offsetY = canvasHeight - iter.y_offset - iter.height;
  636. BOOL sizeEqualsToCanvas = (iter.width == canvasWidth && iter.height == canvasHeight);
  637. BOOL offsetIsZero = (iter.x_offset == 0 && iter.y_offset == 0);
  638. frame.isFullSize = (sizeEqualsToCanvas && offsetIsZero);
  639. if ((!frame.shouldBlend || !frame.hasAlpha) && frame.isFullSize) {
  640. lastBlendIndex = iterIndex;
  641. frame.blendFromIndex = iterIndex;
  642. } else {
  643. if (frame.shouldDispose && frame.isFullSize) {
  644. frame.blendFromIndex = lastBlendIndex;
  645. lastBlendIndex = iterIndex + 1;
  646. } else {
  647. frame.blendFromIndex = lastBlendIndex;
  648. }
  649. }
  650. iterIndex++;
  651. [frames addObject:frame];
  652. } while (WebPDemuxNextFrame(&iter));
  653. WebPDemuxReleaseIterator(&iter);
  654. if (frames.count != frameCount) {
  655. return NO;
  656. }
  657. _frames = [frames copy];
  658. _hasAnimation = hasAnimation;
  659. _hasAlpha = hasAlpha;
  660. _canvasWidth = canvasWidth;
  661. _canvasHeight = canvasHeight;
  662. _frameCount = frameCount;
  663. _loopCount = loopCount;
  664. return YES;
  665. }
  666. - (NSData *)animatedImageData {
  667. return _imageData;
  668. }
  669. - (NSUInteger)animatedImageLoopCount {
  670. return _loopCount;
  671. }
  672. - (NSUInteger)animatedImageFrameCount {
  673. return _frameCount;
  674. }
  675. - (NSTimeInterval)animatedImageDurationAtIndex:(NSUInteger)index {
  676. if (index >= _frameCount) {
  677. return 0;
  678. }
  679. return _frames[index].duration;
  680. }
  681. - (UIImage *)animatedImageFrameAtIndex:(NSUInteger)index {
  682. UIImage *image;
  683. if (index >= _frameCount) {
  684. return nil;
  685. }
  686. SD_LOCK(_lock);
  687. image = [self safeAnimatedImageFrameAtIndex:index];
  688. SD_UNLOCK(_lock);
  689. return image;
  690. }
  691. - (UIImage *)safeAnimatedImageFrameAtIndex:(NSUInteger)index {
  692. if (!_canvas) {
  693. CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
  694. bitmapInfo |= _hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
  695. CGContextRef canvas = CGBitmapContextCreate(NULL, _canvasWidth, _canvasHeight, 8, 0, [SDImageCoderHelper colorSpaceGetDeviceRGB], bitmapInfo);
  696. if (!canvas) {
  697. return nil;
  698. }
  699. _canvas = canvas;
  700. }
  701. if (!_colorSpace) {
  702. _colorSpace = [self sd_colorSpaceWithDemuxer:_demux];
  703. }
  704. SDWebPCoderFrame *frame = _frames[index];
  705. UIImage *image;
  706. WebPIterator iter;
  707. // Because Animated WebP supports dispose method, which means frames can based on previous canvas context. However, if we clear canvas and loop from the 0 index until the request index, it's harm for performance.
  708. // But when one frame's dispose method is `WEBP_MUX_DISPOSE_BACKGROUND`, the canvas is cleared after the frame decoded. And subsequent frames are not effected by that frame.
  709. // So, we calculate each frame's `blendFromIndex`. Then directly draw canvas from that index, instead of always from 0 index.
  710. if (_currentBlendIndex + 1 == index) {
  711. // If the request index is subsequence of current blend index, it does not matter what dispose method is. The canvas is always ready.
  712. _currentBlendIndex = index;
  713. NSUInteger startIndex = index;
  714. // libwebp's index start with 1
  715. if (!WebPDemuxGetFrame(_demux, (int)(startIndex + 1), &iter)) {
  716. WebPDemuxReleaseIterator(&iter);
  717. return nil;
  718. }
  719. } else {
  720. // Else, this can happen when one image set to different imageViews or one loop end. So we should clear the canvas. Then draw until the canvas is ready.
  721. if (_currentBlendIndex != NSNotFound) {
  722. CGContextClearRect(_canvas, CGRectMake(0, 0, _canvasWidth, _canvasHeight));
  723. }
  724. _currentBlendIndex = index;
  725. // Then, loop from the blend from index, draw each of previous frames on the canvas.
  726. // We use do while loop to call `WebPDemuxNextFrame`(fast), until the endIndex meet.
  727. size_t startIndex = frame.blendFromIndex;
  728. size_t endIndex = frame.index;
  729. // libwebp's index start with 1
  730. if (!WebPDemuxGetFrame(_demux, (int)(startIndex + 1), &iter)) {
  731. WebPDemuxReleaseIterator(&iter);
  732. return nil;
  733. }
  734. // Draw from range: [startIndex, endIndex)
  735. if (endIndex > startIndex) {
  736. do {
  737. @autoreleasepool {
  738. [self sd_blendWebpImageWithCanvas:_canvas iterator:iter colorSpace:_colorSpace];
  739. }
  740. } while ((size_t)iter.frame_num < (endIndex + 1) && WebPDemuxNextFrame(&iter));
  741. }
  742. }
  743. // Now the canvas is ready, which respects of dispose method behavior. Just do normal decoding and produce image.
  744. CGImageRef imageRef = [self sd_drawnWebpImageWithCanvas:_canvas iterator:iter colorSpace:_colorSpace];
  745. if (!imageRef) {
  746. return nil;
  747. }
  748. #if SD_UIKIT || SD_WATCH
  749. image = [[UIImage alloc] initWithCGImage:imageRef scale:_scale orientation:UIImageOrientationUp];
  750. #else
  751. image = [[UIImage alloc] initWithCGImage:imageRef scale:_scale orientation:kCGImagePropertyOrientationUp];
  752. #endif
  753. CGImageRelease(imageRef);
  754. WebPDemuxReleaseIterator(&iter);
  755. return image;
  756. }
  757. @end