diff --git a/.github/workflows/check-image-decoding.yml b/.github/workflows/check-image-decoding.yml index 12d9219..83ea600 100644 --- a/.github/workflows/check-image-decoding.yml +++ b/.github/workflows/check-image-decoding.yml @@ -1,4 +1,4 @@ -name: Check the decoded images. +name: Image Decoding Test on: [push, pull_request] diff --git a/Cartfile b/Cartfile index 025a051..bd1a409 100644 --- a/Cartfile +++ b/Cartfile @@ -1,2 +1,2 @@ github "SDWebImage/SDWebImage" ~> 5.10 -github "SDWebImage/libavif-Xcode" >= 0.9.1 \ No newline at end of file +github "SDWebImage/libavif-Xcode" >= 0.11.2-rc1 \ No newline at end of file diff --git a/Cartfile.resolved b/Cartfile.resolved index 174319a..ca7e367 100644 --- a/Cartfile.resolved +++ b/Cartfile.resolved @@ -1,3 +1,4 @@ -github "SDWebImage/SDWebImage" "5.10.2" -github "SDWebImage/libaom-Xcode" "1.0.2" -github "SDWebImage/libavif-Xcode" "0.8.1" +github "SDWebImage/SDWebImage" "5.17.0" +github "SDWebImage/libaom-Xcode" "3.0.0" +github "SDWebImage/libavif-Xcode" "0.11.2-rc1" +github "SDWebImage/libvmaf-Xcode" "2.3.1" diff --git a/Example/Podfile b/Example/Podfile index d1a9718..3dae6df 100644 --- a/Example/Podfile +++ b/Example/Podfile @@ -3,7 +3,7 @@ install! 'cocoapods', :generate_multiple_pod_projects => true target 'SDWebImageAVIFCoder_Example' do platform :ios, '9.0' pod 'SDWebImageAVIFCoder', :path => '../' - pod 'libavif', :subspecs => ['libaom', 'libdav1d'] + pod 'libavif', :subspecs => ['core', 'librav1e', 'libdav1d'] target 'SDWebImageAVIFCoder_Tests' do inherit! 
:search_paths @@ -13,11 +13,11 @@ end target 'SDWebImageAVIFCoder_Example macOS' do platform :osx, '10.11' pod 'SDWebImageAVIFCoder', :path => '../' - pod 'libavif', :subspecs => ['libaom', 'libdav1d'] + pod 'libavif', :subspecs => ['core', 'librav1e', 'libdav1d'] end target 'SDWebImageAVIFCoder_Example CLI' do platform :osx, '10.11' pod 'SDWebImageAVIFCoder', :path => '../' - pod 'libavif', :subspecs => ['libaom', 'libdav1d'] + pod 'libavif', :subspecs => ['core', 'librav1e', 'libdav1d'] end diff --git a/Example/SDWebImageAVIFCoder.xcodeproj/project.pbxproj b/Example/SDWebImageAVIFCoder.xcodeproj/project.pbxproj index cfdfa4c..e4f706b 100644 --- a/Example/SDWebImageAVIFCoder.xcodeproj/project.pbxproj +++ b/Example/SDWebImageAVIFCoder.xcodeproj/project.pbxproj @@ -7,6 +7,7 @@ objects = { /* Begin PBXBuildFile section */ + 32B000392A137E8700FB2873 /* fox.profile0.8bpc.yuv420.avif in Resources */ = {isa = PBXBuildFile; fileRef = 32B000372A137DAA00FB2873 /* fox.profile0.8bpc.yuv420.avif */; }; 32D3263C226344EC001B208C /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 32D3263B226344EC001B208C /* AppDelegate.m */; }; 32D3263F226344EC001B208C /* ViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 32D3263E226344EC001B208C /* ViewController.m */; }; 32D32641226344EC001B208C /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 32D32640226344EC001B208C /* Assets.xcassets */; }; @@ -1866,6 +1867,7 @@ 207C633217DCC3D0312C335C /* Pods-SDWebImageAVIFCoder_Example.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-SDWebImageAVIFCoder_Example.release.xcconfig"; path = "Target Support Files/Pods-SDWebImageAVIFCoder_Example/Pods-SDWebImageAVIFCoder_Example.release.xcconfig"; sourceTree = ""; }; 2783F1D7B48DB3ED9ADFF864 /* libPods-SDWebImageAVIFCoder_Example.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = 
"libPods-SDWebImageAVIFCoder_Example.a"; sourceTree = BUILT_PRODUCTS_DIR; }; 3260C60722634CF90046E4C8 /* SDWebImageAVIFCoder_Example macOS.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = "SDWebImageAVIFCoder_Example macOS.entitlements"; sourceTree = ""; }; + 32B000372A137DAA00FB2873 /* fox.profile0.8bpc.yuv420.avif */ = {isa = PBXFileReference; lastKnownFileType = file; path = fox.profile0.8bpc.yuv420.avif; sourceTree = ""; }; 32D32638226344EC001B208C /* SDWebImageAVIFCoder_Example macOS.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "SDWebImageAVIFCoder_Example macOS.app"; sourceTree = BUILT_PRODUCTS_DIR; }; 32D3263A226344EC001B208C /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; }; 32D3263B226344EC001B208C /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; }; @@ -3757,6 +3759,14 @@ /* End PBXFrameworksBuildPhase section */ /* Begin PBXGroup section */ + 32B000362A137DAA00FB2873 /* Samples */ = { + isa = PBXGroup; + children = ( + 32B000372A137DAA00FB2873 /* fox.profile0.8bpc.yuv420.avif */, + ); + path = Samples; + sourceTree = ""; + }; 32D32639226344EC001B208C /* SDWebImageAVIFCoder_Example macOS */ = { isa = PBXGroup; children = ( @@ -3844,6 +3854,7 @@ 6003F5B5195388D20070C39A /* Tests */ = { isa = PBXGroup; children = ( + 32B000362A137DAA00FB2873 /* Samples */, 6D074D3D240E94FC002197C8 /* Images */, 6003F5BB195388D20070C39A /* Tests.m */, 6003F5B6195388D20070C39A /* Supporting Files */, @@ -7565,6 +7576,7 @@ 6D07558A240E9507002197C8 /* green.255x255.8bpc.yuv420.color.limited.without-alpha.avif in Resources */, 6D075676240E9508002197C8 /* blue.255x255.10bpc.yuv444.color.limited.with-alpha.avif in Resources */, 6D0756EC240E9508002197C8 /* e47a8c.256x255.12bpc.yuv422.color.limited.without-alpha.avif in Resources */, + 
32B000392A137E8700FB2873 /* fox.profile0.8bpc.yuv420.avif in Resources */, 6D0755EE240E9508002197C8 /* gray.256x255.12bpc.yuv422.color.full.without-alpha.avif in Resources */, 6D07563D240E9508002197C8 /* e47a8c.256x256.8bpc.yuv444.color.limited.without-alpha.avif in Resources */, 6D0758C6240E9508002197C8 /* blue.255x255.10bpc.yuv420.mono.limited.with-alpha.avif in Resources */, @@ -8049,10 +8061,10 @@ "DEBUG=1", "$(inherited)", ); + HEADER_SEARCH_PATHS = "${PODS_ROOT}/Headers/Private"; INFOPLIST_FILE = "Tests/Tests-Info.plist"; PRODUCT_BUNDLE_IDENTIFIER = "org.cocoapods.demo.${PRODUCT_NAME:rfc1034identifier}"; PRODUCT_NAME = "$(TARGET_NAME)"; - HEADER_SEARCH_PATHS = "${PODS_ROOT}/Headers/Private"; TEST_HOST = "$(BUILT_PRODUCTS_DIR)/SDWebImageAVIFCoder_Example.app/SDWebImageAVIFCoder_Example"; WRAPPER_EXTENSION = xctest; }; diff --git a/Example/SDWebImageAVIFCoder/SDViewController.m b/Example/SDWebImageAVIFCoder/SDViewController.m index afad1d0..4c342bf 100644 --- a/Example/SDWebImageAVIFCoder/SDViewController.m +++ b/Example/SDWebImageAVIFCoder/SDViewController.m @@ -33,7 +33,7 @@ - (void)viewDidLoad [self.view addSubview:imageView1]; [self.view addSubview:imageView2]; - [imageView1 sd_setImageWithURL:AVIFURL placeholderImage:nil options:0 completed:^(UIImage * _Nullable image, NSError * _Nullable error, SDImageCacheType cacheType, NSURL * _Nullable imageURL) { + [imageView1 sd_setImageWithURL:AVIFURL completed:^(UIImage * _Nullable image, NSError * _Nullable error, SDImageCacheType cacheType, NSURL * _Nullable imageURL) { if (image) { NSLog(@"Static AVIF load success"); dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{ @@ -44,7 +44,10 @@ - (void)viewDidLoad }); } }]; - [imageView2 sd_setImageWithURL:animatedAVIFSURL completed:^(UIImage * _Nullable image, NSError * _Nullable error, SDImageCacheType cacheType, NSURL * _Nullable imageURL) { + CGSize animatedThumbnailSize = CGSizeMake(100, 100); + [imageView2 
sd_setImageWithURL:animatedAVIFSURL placeholderImage:nil options:0 context:@{SDWebImageContextImageThumbnailPixelSize : @(animatedThumbnailSize)} progress:nil completed:^(UIImage * _Nullable image, NSError * _Nullable error, SDImageCacheType cacheType, NSURL * _Nullable imageURL) { + NSCAssert(image.size.width == 100, @"Thumbnail width should be 100"); + NSCAssert(image.size.height == 100, @"Thumbnail height should be 100"); if (image) { NSLog(@"Animated AVIFS load success"); } diff --git a/Example/Tests/Samples/fox.profile0.8bpc.yuv420.avif b/Example/Tests/Samples/fox.profile0.8bpc.yuv420.avif new file mode 100644 index 0000000..2bae4c7 Binary files /dev/null and b/Example/Tests/Samples/fox.profile0.8bpc.yuv420.avif differ diff --git a/Example/Tests/Tests.m b/Example/Tests/Tests.m index 65810ee..aabc572 100644 --- a/Example/Tests/Tests.m +++ b/Example/Tests/Tests.m @@ -267,6 +267,21 @@ -(void)testEncodingAndDecoding [self assertColor8:@"" img:image.CGImage expectedColor: kRed8]; } +-(void)testThumbnailDecoding +{ + NSString *filename = @"fox.profile0.8bpc.yuv420.avif"; + NSString* imgBundle = [[NSBundle mainBundle] pathForResource:filename ofType:@""]; + NSData* imgData = [[NSData alloc] initWithContentsOfFile: imgBundle]; + UIImage *originImage = [self->coder decodedImageWithData:imgData options:nil]; + XCTAssertEqual(originImage.size.width, 1204); + XCTAssertEqual(originImage.size.height, 800); + + CGSize thumbnailSize = CGSizeMake(100, 100); + UIImage *thumbnailImage = [self->coder decodedImageWithData:imgData options:@{SDImageCoderDecodeThumbnailPixelSize: @(thumbnailSize)}]; + XCTAssertEqual(thumbnailImage.size.width, 100); + XCTAssertEqual(thumbnailImage.size.height, 67); +} + -(void)assertColor8: (NSString*)filename img:(CGImageRef)img expectedColor:(UInt8*)expectedColor { CFDataRef rawData = CGDataProviderCopyData(CGImageGetDataProvider(img)); diff --git a/Package.resolved b/Package.resolved index 0f610f2..f464b7f 100644 --- a/Package.resolved +++ 
b/Package.resolved @@ -6,8 +6,8 @@ "repositoryURL": "https://github.com/SDWebImage/libaom-Xcode.git", "state": { "branch": null, - "revision": "81e75e1663855b8b53f6e26ea1ae087ea8f6ccbc", - "version": "1.0.2" + "revision": "482cafbebbc5f32378b82339b7580761fab4fd23", + "version": "2.0.2" } }, { @@ -15,8 +15,17 @@ "repositoryURL": "https://github.com/SDWebImage/libavif-Xcode.git", "state": { "branch": null, - "revision": "d02dedabc1bf50f415a11aabbe5ad876a01251e9", - "version": "0.8.1" + "revision": "28be85d8693b8bc2ea3a4d323caf652e740b4683", + "version": "0.9.1" + } + }, + { + "package": "libvmaf", + "repositoryURL": "https://github.com/SDWebImage/libvmaf-Xcode.git", + "state": { + "branch": null, + "revision": "26544e92506764862358ce2198ddab9af7685ed5", + "version": "2.2.0" } }, { diff --git a/Package.swift b/Package.swift index 67794d4..6198bb4 100644 --- a/Package.swift +++ b/Package.swift @@ -18,7 +18,7 @@ let package = Package( // Dependencies declare other packages that this package depends on. // .package(url: /* package url */, from: "1.0.0"), .package(url: "https://github.com/SDWebImage/SDWebImage.git", from: "5.10.0"), - .package(url: "https://github.com/SDWebImage/libavif-Xcode.git", from: "0.9.1") + .package(url: "https://github.com/SDWebImage/libavif-Xcode.git", from: "0.11.0") ], targets: [ // Targets are the basic building blocks of a package. A target can define a module or a test suite. 
diff --git a/README.md b/README.md index 7ed2864..6dfd1b8 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # SDWebImageAVIFCoder -[![CI Status](https://img.shields.io/travis/SDWebImage/SDWebImageAVIFCoder.svg?style=flat)](https://travis-ci.org/SDWebImage/SDWebImageAVIFCoder) +[![Build Status](https://github.com/SDWebImage/SDWebImageAVIFCoder/actions/workflows/check-image-decoding.yml/badge.svg)](https://github.com/SDWebImage/SDWebImageAVIFCoder/actions/workflows/check-image-decoding.yml) [![Version](https://img.shields.io/cocoapods/v/SDWebImageAVIFCoder.svg?style=flat)](https://cocoapods.org/pods/SDWebImageAVIFCoder) [![License](https://img.shields.io/cocoapods/l/SDWebImageAVIFCoder.svg?style=flat)](https://cocoapods.org/pods/SDWebImageAVIFCoder) [![Platform](https://img.shields.io/cocoapods/p/SDWebImageAVIFCoder.svg?style=flat)](https://cocoapods.org/pods/SDWebImageAVIFCoder) @@ -31,7 +31,25 @@ You can choose the codec and use `libavif` CocoaPods subspec to choose the one y ### aom -By default, libavif is built with [aom](https://aomedia.googlesource.com/aom/) codec support. aom is the first AV1 codec during the standard draft implementation. The aom support both decodinng and encoding ++ For Carthage/SwiftPM + +libavif is built with [aom](https://aomedia.googlesource.com/aom/) codec support. aom is the first AV1 codec during the standard draft implementation. aom supports both decoding and encoding + ++ For CocoaPods + +Because of strange design of **subspec dependencies resolution algorithm**, we can not make aom by default (or all the subspecs will always link aom, which means not optional). So libavif is built with no codec (`libavif/core`). You need to choose the actual AV1 codec using one or more of below. 
+ +If you want to use aom, use: + +```ruby +pod 'libavif/libaom' +``` + +or other AV1 codec, like dav1d: + +```ruby +pod 'libavif/libdav1d' +``` ### dav1d (Decoding) @@ -53,23 +71,56 @@ From v0.8.3, libavif can built with libgav1. For For CocoaPods user, you can sim See more about [performance](https://github.com/xiph/rav1e/issues/1248) -From v0.4.3, libavif can built with rav1e. For CocoaPods user, you can simply use the subspec for this. Carthage for optional rav1c codec is not supported currently. +From v0.4.3, libavif can built with rav1e. For CocoaPods user, you can simply use the subspec for this. Carthage/SPM for optional rav1e codec is not supported currently. + +Note: The librav1e on CocoaPods uses pre-built static-linking binary. Which means, it can not be used for CocoaPods's dynamic-linking. -Note rav1e currently only support iOS && macOS. watchOS and tvOS supports need Rust community upstream support. +To use for framework package format (CocoaPods defaults use ar archive format), you should write something like this: -Note that for CocoaPods user, rav1e is prebuilt binary (to avoid developer to install rust toolchain) and hosted on GitHub Git LFS. Make sure you have git-lfs installed. +```ruby +use_frameworks! :linkage => :static +``` -```bash -brew install git-lfs -git lfs install +but not this: + +```ruby +use_frameworks! +# use_frameworks! :linkage => :dynamic ``` +Note: before librav1e v0.6.1, it only supports iOS && macOS. watchOS and tvOS is available from v0.6.2 with the latest Rust toolchain. And for visionOS supports need Rust community upstream support. For latest integration, check or fire issues in https://github.com/SDWebImage/librav1e-Xcode. + ### SVT-AV1 (Encoding) [SVT-AV1](https://gitlab.com/AOMediaCodec/SVT-AV1) is the Scalable Video Technology for AV1 (SVT-AV1 Encoder and Decoder) is an AV1-compliant encoder/decoder library core. From v0.8.3, libavif can built with STV-AV1. 
For For CocoaPods user, you can simply use the subspec for this. +## Choose codec at runtime + +libavif has automatic detection of the installed AVIF codec. However, for example, `aom` supports both decoding and encoding, but is slower than `rav1e` on encoding. + +You can force the use of `aom` for decoding and `rav1e` for encoding like this: + ++ Objective-C + +```objective-c +NSDictionary *decodeOptions = @{SDImageCoderAVIFDecodeCodecChoice: @(AVIF_CODEC_CHOICE_AOM)}; +NSDictionary *encodeOptions = @{SDImageCoderAVIFEncodeCodecChoice: @(AVIF_CODEC_CHOICE_RAV1E)}; + +// Pass from UI level options +[imageView sd_setImageWithURL:url placeholderImage:nil options:0 context:@{SDWebImageContextImageDecodeOptions: decodeOptions, SDWebImageContextImageEncodeOptions: encodeOptions} progress:nil completed:nil]; +``` + ++ Swift + +```swift +let decodeOptions: [SDImageCoderOption: Any] = [.avifDecodeCodecChoice: AVIF_CODEC_CHOICE_AOM.rawValue] +let encodeOptions: [SDImageCoderOption: Any] = [.avifEncodeCodecChoice: AVIF_CODEC_CHOICE_RAV1E.rawValue] + +// Pass from UI level options +imageView.sd_setImage(with: url, placeholderImage: nil, options: [], context: [.imageDecodeOptions: decodeOptions, .imageEncodeOptions: encodeOptions], progress: nil, completed: nil) +``` ## Requirements @@ -92,7 +143,7 @@ Note: From version 0.4.0, if you want to use rav1e or dav1e instead aom for fast ```ruby pod 'SDWebImageAVIFCoder' -pod 'libavif', :subpsecs => [ +pod 'libavif', :subspecs => [ 'libdav1d', 'librav1e' ] @@ -102,7 +153,7 @@ or, for libgav1 && SVT-AV1, use: ```ruby pod 'SDWebImageAVIFCoder' -pod 'libavif', :subpsecs => [ +pod 'libavif', :subspecs => [ 'libgva1', 'SVT-AV1' ] @@ -129,7 +180,7 @@ The framework through SwiftPM only supports libaom for AVIF decoding && encoding ```swift let package = Package( dependencies: [ - .package(url: "https://github.com/SDWebImage/SDWebImageAVIF.git", from: "0.5") + .package(url: 
"https://github.com/SDWebImage/SDWebImageAVIFCoder.git", from: "0.5") ] ) ``` @@ -156,6 +207,66 @@ let imageView: UIImageView imageView.sd_setImage(with: url) ``` +### Decoding + ++ Objective-C + +```objective-c +// AVIF image decoding +NSData *avifData; +UIImage *image = [[SDImageAVIFCoder sharedCoder] decodedImageWithData:avifData options:nil]; +``` + ++ Swift + +```swift +// AVIF image decoding +let avifData: Data +let image = SDImageAVIFCoder.shared.decodedImage(with: avifData, options: nil) +``` + +### Thumbnail Decoding (0.10.0+) + ++ Objective-C + +```objective-c +// AVIF thumbnail image decoding +NSData *avifData; +CGSize thumbnailSize = CGSizeMake(300, 300); +UIImage *thumbnailImage = [[SDImageAVIFCoder sharedCoder] decodedImageWithData:avifData options:@{SDImageCoderDecodeThumbnailPixelSize : @(thumbnailSize)}]; +``` + ++ Swift + +```swift +// AVIF thumbnail image decoding +let avifData: Data +let thumbnailSize = CGSize(width: 300, height: 300) +let image = SDImageAVIFCoder.shared.decodedImage(with: avifData, options: [.decodeThumbnailPixelSize: thumbnailSize]) +``` + +### Encoding (No animation support) + ++ Objective-C + +```objective-c +// AVIF image encoding +UIImage *image; +NSData *avifData = [[SDImageAVIFCoder sharedCoder] encodedDataWithImage:image format:SDImageFormatAVIF options:nil]; +// Encode Quality +NSData *lossyAVIFData = [[SDImageAVIFCoder sharedCoder] encodedDataWithImage:image format:SDImageFormatAVIF options:@{SDImageCoderEncodeCompressionQuality : @(0.1)}]; // [0, 1] compression quality +``` + ++ Swift + +```swift +// AVIF image encoding +let image: UIImage +let avifData = SDImageAVIFCoder.shared.encodedData(with: image, format: .avif, options: nil) +// Encode Quality +let lossyAVIFData = SDImageAVIFCoder.shared.encodedData(with: image, format: .avif, options: [.encodeCompressionQuality: 0.1]) // [0, 1] compression quality +``` + ## Screenshot @@ -185,7 +296,7 @@ DreamPiggy, lizhuoli1126@126.com ## Contributor -ledyba-z, 
ryo.hirafuji@link-u.co.jp +[ledyba-z](https://github.com/ledyba-z), ryo.hirafuji@gmail.com ## License diff --git a/SDWebImageAVIFCoder.podspec b/SDWebImageAVIFCoder.podspec index e3db3a0..e90b0fc 100644 --- a/SDWebImageAVIFCoder.podspec +++ b/SDWebImageAVIFCoder.podspec @@ -8,7 +8,7 @@ Pod::Spec.new do |s| s.name = 'SDWebImageAVIFCoder' - s.version = '0.9.0' + s.version = '0.11.1' s.summary = 'A SDWebImage coder plugin to support AVIF(AV1 Image File Format) image' # This description is used to generate tags and improve search results. @@ -40,6 +40,6 @@ Which is built based on the open-sourced libavif codec. } s.dependency 'SDWebImage', '~> 5.10' - s.dependency 'libavif', '>= 0.9.1' + s.dependency 'libavif/core', '>= 0.11.0' s.libraries = 'c++' end diff --git a/SDWebImageAVIFCoder/Classes/ColorSpace.m b/SDWebImageAVIFCoder/Classes/ColorSpace.m index dca0a8b..e90f0f2 100644 --- a/SDWebImageAVIFCoder/Classes/ColorSpace.m +++ b/SDWebImageAVIFCoder/Classes/ColorSpace.m @@ -7,11 +7,11 @@ #import "SDImageAVIFCoder.h" #import -#if __has_include() +#if __has_include() && __has_include() #import #import #else -#import "avif/avifs.h" +#import "avif/avif.h" #import "avif/internal.h" #endif @@ -164,45 +164,37 @@ static void CalcTransferFunction(uint16_t const transferCharacteristics, vImageT } } CGColorSpaceRef SDAVIFCreateColorSpaceMono(avifColorPrimaries const colorPrimaries, avifTransferCharacteristics const transferCharacteristics) { - if (@available(macOS 10.10, iOS 8.0, tvOS 8.0, *)) { - vImage_Error err; - vImageWhitePoint white; - vImageTransferFunction transfer; - CalcWhitePoint(colorPrimaries, &white); - CalcTransferFunction(transferCharacteristics, &transfer); - CGColorSpaceRef colorSpace = vImageCreateMonochromeColorSpaceWithWhitePointAndTransferFunction(&white, &transfer, kCGRenderingIntentDefault, kvImagePrintDiagnosticsToConsole, &err); - if(err != kvImageNoError) { - NSLog(@"[BUG] Failed to create monochrome color space: %ld", err); - if(colorSpace 
!= NULL) { - CGColorSpaceRelease(colorSpace); - } - return NULL; + vImage_Error err; + vImageWhitePoint white; + vImageTransferFunction transfer; + CalcWhitePoint(colorPrimaries, &white); + CalcTransferFunction(transferCharacteristics, &transfer); + CGColorSpaceRef colorSpace = vImageCreateMonochromeColorSpaceWithWhitePointAndTransferFunction(&white, &transfer, kCGRenderingIntentDefault, kvImagePrintDiagnosticsToConsole, &err); + if(err != kvImageNoError) { + NSLog(@"[BUG] Failed to create monochrome color space: %ld", err); + if(colorSpace != NULL) { + CGColorSpaceRelease(colorSpace); } - return colorSpace; - }else{ return NULL; } + return colorSpace; } CGColorSpaceRef SDAVIFCreateColorSpaceRGB(avifColorPrimaries const colorPrimaries, avifTransferCharacteristics const transferCharacteristics) { - if (@available(macOS 10.10, iOS 8.0, tvOS 8.0, *)) { - vImage_Error err; - vImageRGBPrimaries primaries; - vImageTransferFunction transfer; - CalcRGBPrimaries(colorPrimaries, &primaries); - CalcTransferFunction(transferCharacteristics, &transfer); - CGColorSpaceRef colorSpace = vImageCreateRGBColorSpaceWithPrimariesAndTransferFunction(&primaries, &transfer, kCGRenderingIntentDefault, kvImagePrintDiagnosticsToConsole, &err); - if(err != kvImageNoError) { - NSLog(@"[BUG] Failed to create monochrome color space: %ld", err); - if(colorSpace != NULL) { - CGColorSpaceRelease(colorSpace); - } - return NULL; + vImage_Error err; + vImageRGBPrimaries primaries; + vImageTransferFunction transfer; + CalcRGBPrimaries(colorPrimaries, &primaries); + CalcTransferFunction(transferCharacteristics, &transfer); + CGColorSpaceRef colorSpace = vImageCreateRGBColorSpaceWithPrimariesAndTransferFunction(&primaries, &transfer, kCGRenderingIntentDefault, kvImagePrintDiagnosticsToConsole, &err); + if(err != kvImageNoError) { + NSLog(@"[BUG] Failed to create monochrome color space: %ld", err); + if(colorSpace != NULL) { + CGColorSpaceRelease(colorSpace); } - return colorSpace; - }else{ return NULL; } 
+ return colorSpace; } void SDAVIFCalcColorSpaceMono(avifImage * avif, CGColorSpaceRef* ref, BOOL* shouldRelease) { @@ -214,8 +206,8 @@ void SDAVIFCalcColorSpaceMono(avifImage * avif, CGColorSpaceRef* ref, BOOL* shou }); } if(avif->icc.data && avif->icc.size) { - if(@available(macOS 10.12, iOS 10.0, tvOS 10.0, *)) { - CFDataRef iccData = CFDataCreateWithBytesNoCopy(kCFAllocatorDefault, avif->icc.data, avif->icc.size,kCFAllocatorNull); + if(@available(macOS 10.12, iOS 10.0, tvOS 10.0, watchOS 3.0, *)) { + CFDataRef iccData = CFDataCreate(kCFAllocatorDefault, avif->icc.data, avif->icc.size); *ref = CGColorSpaceCreateWithICCData(iccData); CFRelease(iccData); *shouldRelease = TRUE; @@ -312,8 +304,8 @@ void SDAVIFCalcColorSpaceRGB(avifImage * avif, CGColorSpaceRef* ref, BOOL* shoul }); } if(avif->icc.data && avif->icc.size) { - if(@available(macOS 10.12, iOS 10.0, tvOS 10.0, *)) { - CFDataRef iccData = CFDataCreateWithBytesNoCopy(kCFAllocatorDefault, avif->icc.data, avif->icc.size,kCFAllocatorNull); + if(@available(macOS 10.12, iOS 10.0, tvOS 10.0, watchOS 3.0, *)) { + CFDataRef iccData = CFDataCreate(kCFAllocatorDefault, avif->icc.data, avif->icc.size); *ref = CGColorSpaceCreateWithICCData(iccData); CFRelease(iccData); *shouldRelease = TRUE; @@ -339,7 +331,7 @@ void SDAVIFCalcColorSpaceRGB(avifImage * avif, CGColorSpaceRef* ref, BOOL* shoul static CGColorSpaceRef bt709 = NULL; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ - if (@available(macOS 10.11, iOS 9.0, tvOS 9.0, *)) { + if (@available(macOS 10.11, iOS 9.0, tvOS 9.0, watchOS 2.0, *)) { bt709 = CGColorSpaceCreateWithName(kCGColorSpaceITUR_709); } else { bt709 = defaultColorSpace; @@ -354,7 +346,7 @@ void SDAVIFCalcColorSpaceRGB(avifImage * avif, CGColorSpaceRef* ref, BOOL* shoul static CGColorSpaceRef sRGB = NULL; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ - if (@available(macOS 10.5, iOS 9.0, tvOS 9.0, *)) { + if (@available(macOS 10.5, iOS 9.0, tvOS 9.0, watchOS 2.0, *)) { 
sRGB = CGColorSpaceCreateWithName(kCGColorSpaceSRGB); } else { sRGB = defaultColorSpace; @@ -369,7 +361,7 @@ void SDAVIFCalcColorSpaceRGB(avifImage * avif, CGColorSpaceRef* ref, BOOL* shoul static CGColorSpaceRef sRGBlinear = NULL; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ - if (@available(macOS 10.12, iOS 10.0, tvOS 10.0, *)) { + if (@available(macOS 10.12, iOS 10.0, tvOS 10.0, watchOS 3.0, *)) { sRGBlinear = CGColorSpaceCreateWithName(kCGColorSpaceLinearSRGB); } else { sRGBlinear = defaultColorSpace; @@ -385,7 +377,7 @@ void SDAVIFCalcColorSpaceRGB(avifImage * avif, CGColorSpaceRef* ref, BOOL* shoul static CGColorSpaceRef bt2020 = NULL; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ - if (@available(macOS 10.11, iOS 9.0, tvOS 9.0, *)) { + if (@available(macOS 10.11, iOS 9.0, tvOS 9.0, watchOS 2.0, *)) { bt2020 = CGColorSpaceCreateWithName(kCGColorSpaceITUR_2020); } else { bt2020 = defaultColorSpace; @@ -395,12 +387,57 @@ void SDAVIFCalcColorSpaceRGB(avifImage * avif, CGColorSpaceRef* ref, BOOL* shoul *shouldRelease = FALSE; return; } + if(colorPrimaries == AVIF_COLOR_PRIMARIES_BT2020 && + transferCharacteristics == AVIF_TRANSFER_CHARACTERISTICS_SMPTE2084) { + static CGColorSpaceRef bt2020pq = NULL; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + CFStringRef colorSpaceName = NULL; + if (@available(macOS 11.0, iOS 14.0, tvOS 14.0, watchOS 7.0, *)) { + colorSpaceName = kCGColorSpaceITUR_2100_PQ; + } else if (@available(macOS 10.15.4, iOS 13.4, tvOS 13.4, watchOS 6.2, *)) { + colorSpaceName = kCGColorSpaceITUR_2020_PQ; + } else if (@available(macOS 10.14.6, iOS 12.6, tvOS 12.0, watchOS 5.0, *)) { + colorSpaceName = kCGColorSpaceITUR_2020_PQ_EOTF; + } + if (colorSpaceName) { + bt2020pq = CGColorSpaceCreateWithName(colorSpaceName); + } else { + bt2020pq = defaultColorSpace; + } + }); + *ref = bt2020pq; + *shouldRelease = FALSE; + return; + } + if(colorPrimaries == AVIF_COLOR_PRIMARIES_BT2020 && + 
transferCharacteristics == AVIF_TRANSFER_CHARACTERISTICS_HLG) { + static CGColorSpaceRef bt2020hlg = NULL; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + CFStringRef colorSpaceName = NULL; + if (@available(macOS 11.0, iOS 14.0, tvOS 14.0, watchOS 7.0, *)) { + colorSpaceName = kCGColorSpaceITUR_2100_HLG; + } else if (@available(macOS 10.15.6, iOS 12.6, tvOS 12.0, watchOS 5.0, *)) { + colorSpaceName = kCGColorSpaceITUR_2020_HLG; + } + if (colorSpaceName) { + bt2020hlg = CGColorSpaceCreateWithName(colorSpaceName); + } else { + bt2020hlg = defaultColorSpace; + } + }); + + *ref = bt2020hlg; + *shouldRelease = FALSE; + return; + } if(colorPrimaries == AVIF_COLOR_PRIMARIES_BT2020 && transferCharacteristics == AVIF_TRANSFER_CHARACTERISTICS_LINEAR) { static CGColorSpaceRef bt2020linear = NULL; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ - if (@available(macOS 10.14.3, iOS 12.3, tvOS 12.3, *)) { + if (@available(macOS 10.14.3, iOS 12.3, tvOS 12.3, watchOS 5.0, *)) { bt2020linear = CGColorSpaceCreateWithName(kCGColorSpaceExtendedLinearITUR_2020); } else { bt2020linear = defaultColorSpace; @@ -415,7 +452,7 @@ void SDAVIFCalcColorSpaceRGB(avifImage * avif, CGColorSpaceRef* ref, BOOL* shoul static CGColorSpaceRef p3 = NULL; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ - if (@available(macOS 10.11.2, iOS 9.3, tvOS 9.3, *)) { + if (@available(macOS 10.11.2, iOS 9.3, tvOS 9.3, watchOS 2.2, *)) { p3 = CGColorSpaceCreateWithName(kCGColorSpaceDisplayP3); } else { p3 = defaultColorSpace; @@ -425,12 +462,33 @@ void SDAVIFCalcColorSpaceRGB(avifImage * avif, CGColorSpaceRef* ref, BOOL* shoul *shouldRelease = FALSE; return; } + if(colorPrimaries == AVIF_COLOR_PRIMARIES_SMPTE432 /* Display P3 */ && + transferCharacteristics == AVIF_TRANSFER_CHARACTERISTICS_SMPTE2084) { + static CGColorSpaceRef p3pq = NULL; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + CFStringRef colorSpaceName = NULL; + if (@available(macOS 
10.15.4, iOS 13.4, tvOS 13.4, watchOS 6.2, *)) { + colorSpaceName = kCGColorSpaceDisplayP3_PQ; + } else if (@available(macOS 10.14.6, iOS 12.6, tvOS 12.0, watchOS 5.0, *)) { + colorSpaceName = kCGColorSpaceDisplayP3_PQ_EOTF; + } + if (colorSpaceName) { + p3pq = CGColorSpaceCreateWithName(colorSpaceName); + } else { + p3pq = defaultColorSpace; + } + }); + *ref = p3pq; + *shouldRelease = FALSE; + return; + } if(colorPrimaries == AVIF_COLOR_PRIMARIES_SMPTE432 /* Display P3 */ && transferCharacteristics == AVIF_TRANSFER_CHARACTERISTICS_HLG) { static CGColorSpaceRef p3hlg = NULL; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ - if (@available(macOS 10.14.6, iOS 13.0, tvOS 13.0, *)) { + if (@available(macOS 10.14.6, iOS 13.0, tvOS 13.0, watchOS 5.0, *)) { p3hlg = CGColorSpaceCreateWithName(kCGColorSpaceDisplayP3_HLG); } else { p3hlg = defaultColorSpace; @@ -446,7 +504,7 @@ void SDAVIFCalcColorSpaceRGB(avifImage * avif, CGColorSpaceRef* ref, BOOL* shoul static CGColorSpaceRef p3linear = NULL; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ - if (@available(macOS 10.14.3, iOS 12.3, tvOS 12.3, *)) { + if (@available(macOS 10.14.3, iOS 12.3, tvOS 12.3, watchOS 5.0, *)) { p3linear = CGColorSpaceCreateWithName(kCGColorSpaceExtendedLinearDisplayP3); } else { p3linear = defaultColorSpace; diff --git a/SDWebImageAVIFCoder/Classes/Conversion.m b/SDWebImageAVIFCoder/Classes/Conversion.m index 3b0aa86..9cc0f7a 100644 --- a/SDWebImageAVIFCoder/Classes/Conversion.m +++ b/SDWebImageAVIFCoder/Classes/Conversion.m @@ -7,11 +7,11 @@ #import "SDImageAVIFCoder.h" #import -#if __has_include() +#if __has_include() && __has_include() #import #import #else -#import "avif/avifs.h" +#import "avif/avif.h" #import "avif/internal.h" #endif #import "Private/ColorSpace.h" @@ -55,36 +55,7 @@ static CGImageRef CreateImageFromBuffer(avifImage * avif, vImage_Buffer* result) return imageRef; } -static avifBool avifPrepareReformatState(const avifImage * image, const 
avifRGBImage * rgb, avifReformatState * state) -{ - if ((image->depth != 8) && (image->depth != 10) && (image->depth != 12)) { - return AVIF_FALSE; - } - if ((rgb->depth != 8) && (rgb->depth != 10) && (rgb->depth != 12) && (rgb->depth != 16)) { - return AVIF_FALSE; - } - - // These matrix coefficients values are currently unsupported. Revise this list as more support is added. - // - // YCgCo performs limited-full range adjustment on R,G,B but the current implementation performs range adjustment - // on Y,U,V. So YCgCo with limited range is unsupported. - if ((image->matrixCoefficients == 3 /* CICP reserved */) || - ((image->matrixCoefficients == AVIF_MATRIX_COEFFICIENTS_YCGCO) && (image->yuvRange == AVIF_RANGE_LIMITED)) || - (image->matrixCoefficients == AVIF_MATRIX_COEFFICIENTS_BT2020_CL) || - (image->matrixCoefficients == AVIF_MATRIX_COEFFICIENTS_SMPTE2085) || - (image->matrixCoefficients == AVIF_MATRIX_COEFFICIENTS_CHROMA_DERIVED_CL) || - (image->matrixCoefficients >= AVIF_MATRIX_COEFFICIENTS_ICTCP)) { // Note the >= catching "future" CICP values here too - return AVIF_FALSE; - } - - if ((image->matrixCoefficients == AVIF_MATRIX_COEFFICIENTS_IDENTITY) && (image->yuvFormat != AVIF_PIXEL_FORMAT_YUV444)) { - return AVIF_FALSE; - } - - if (image->yuvFormat == AVIF_PIXEL_FORMAT_NONE) { - return AVIF_FALSE; - } - +static void PrepareReformatState(const avifImage * image, avifReformatState * state) { avifGetPixelFormatInfo(image->yuvFormat, &state->formatInfo); avifCalcYUVCoefficients(image, &state->kr, &state->kg, &state->kb); state->mode = AVIF_REFORMAT_MODE_YUV_COEFFICIENTS; @@ -100,115 +71,13 @@ static avifBool avifPrepareReformatState(const avifImage * image, const avifRGBI state->kg = 0.0f; state->kb = 0.0f; } - - state->yuvChannelBytes = (image->depth > 8) ? 2 : 1; - state->rgbChannelBytes = (rgb->depth > 8) ? 
2 : 1; - state->rgbChannelCount = avifRGBFormatChannelCount(rgb->format); - state->rgbPixelBytes = state->rgbChannelBytes * state->rgbChannelCount; - - switch (rgb->format) { - case AVIF_RGB_FORMAT_RGB: - state->rgbOffsetBytesR = state->rgbChannelBytes * 0; - state->rgbOffsetBytesG = state->rgbChannelBytes * 1; - state->rgbOffsetBytesB = state->rgbChannelBytes * 2; - state->rgbOffsetBytesA = 0; - break; - case AVIF_RGB_FORMAT_RGBA: - state->rgbOffsetBytesR = state->rgbChannelBytes * 0; - state->rgbOffsetBytesG = state->rgbChannelBytes * 1; - state->rgbOffsetBytesB = state->rgbChannelBytes * 2; - state->rgbOffsetBytesA = state->rgbChannelBytes * 3; - break; - case AVIF_RGB_FORMAT_ARGB: - state->rgbOffsetBytesA = state->rgbChannelBytes * 0; - state->rgbOffsetBytesR = state->rgbChannelBytes * 1; - state->rgbOffsetBytesG = state->rgbChannelBytes * 2; - state->rgbOffsetBytesB = state->rgbChannelBytes * 3; - break; - case AVIF_RGB_FORMAT_BGR: - state->rgbOffsetBytesB = state->rgbChannelBytes * 0; - state->rgbOffsetBytesG = state->rgbChannelBytes * 1; - state->rgbOffsetBytesR = state->rgbChannelBytes * 2; - state->rgbOffsetBytesA = 0; - break; - case AVIF_RGB_FORMAT_BGRA: - state->rgbOffsetBytesB = state->rgbChannelBytes * 0; - state->rgbOffsetBytesG = state->rgbChannelBytes * 1; - state->rgbOffsetBytesR = state->rgbChannelBytes * 2; - state->rgbOffsetBytesA = state->rgbChannelBytes * 3; - break; - case AVIF_RGB_FORMAT_ABGR: - state->rgbOffsetBytesA = state->rgbChannelBytes * 0; - state->rgbOffsetBytesB = state->rgbChannelBytes * 1; - state->rgbOffsetBytesG = state->rgbChannelBytes * 2; - state->rgbOffsetBytesR = state->rgbChannelBytes * 3; - break; - - default: - return AVIF_FALSE; - } - - state->yuvDepth = image->depth; - state->yuvRange = image->yuvRange; - state->yuvMaxChannel = (1 << image->depth) - 1; - state->rgbMaxChannel = (1 << rgb->depth) - 1; - state->rgbMaxChannelF = (float)state->rgbMaxChannel; - state->biasY = (state->yuvRange == AVIF_RANGE_LIMITED) ? 
(float)(16 << (state->yuvDepth - 8)) : 0.0f; - state->biasUV = (float)(1 << (state->yuvDepth - 1)); - state->biasA = (image->alphaRange == AVIF_RANGE_LIMITED) ? (float)(16 << (state->yuvDepth - 8)) : 0.0f; - state->rangeY = (float)((state->yuvRange == AVIF_RANGE_LIMITED) ? (219 << (state->yuvDepth - 8)) : state->yuvMaxChannel); - state->rangeUV = (float)((state->yuvRange == AVIF_RANGE_LIMITED) ? (224 << (state->yuvDepth - 8)) : state->yuvMaxChannel); - state->rangeA = (float)((image->alphaRange == AVIF_RANGE_LIMITED) ? (219 << (state->yuvDepth - 8)) : state->yuvMaxChannel); - - uint32_t cpCount = 1 << image->depth; - if (state->mode == AVIF_REFORMAT_MODE_IDENTITY) { - for (uint32_t cp = 0; cp < cpCount; ++cp) { - state->unormFloatTableY[cp] = ((float)cp - state->biasY) / state->rangeY; - state->unormFloatTableUV[cp] = ((float)cp - state->biasY) / state->rangeY; - } - } else { - for (uint32_t cp = 0; cp < cpCount; ++cp) { - // Review this when implementing YCgCo limited range support. - state->unormFloatTableY[cp] = ((float)cp - state->biasY) / state->rangeY; - state->unormFloatTableUV[cp] = ((float)cp - state->biasUV) / state->rangeUV; - } - } - - state->toRGBAlphaMode = AVIF_ALPHA_MULTIPLY_MODE_NO_OP; - if (image->alphaPlane) { - if (!avifRGBFormatHasAlpha(rgb->format) || rgb->ignoreAlpha) { - // if we are converting some image with alpha into a format without alpha, we should do 'premultiply alpha' before - // discarding alpha plane. This has the same effect of rendering this image on a black background, which makes sense. 
- if (!image->alphaPremultiplied) { - state->toRGBAlphaMode = AVIF_ALPHA_MULTIPLY_MODE_MULTIPLY; - } - } else { - if (!image->alphaPremultiplied && rgb->alphaPremultiplied) { - state->toRGBAlphaMode = AVIF_ALPHA_MULTIPLY_MODE_MULTIPLY; - } else if (image->alphaPremultiplied && !rgb->alphaPremultiplied) { - state->toRGBAlphaMode = AVIF_ALPHA_MULTIPLY_MODE_UNMULTIPLY; - } - } - } - - return AVIF_TRUE; } - -static void SetupConversionInfo(avifImage * avif, +static void SetupConversionInfo(const avifImage * avif, avifReformatState* state, vImage_YpCbCrToARGBMatrix* matrix, vImage_YpCbCrPixelRange* pixelRange) { - avifRGBImage emptyRGBImage = { - .width = avif->width, - .height = avif->height, - .depth = avif->depth, - .format = AVIF_RGB_FORMAT_ARGB, - - .pixels = NULL, - .rowBytes = 0, - }; - avifPrepareReformatState(avif, &emptyRGBImage, state); + PrepareReformatState(avif, state); // Setup Matrix matrix->Yp = 1.0f; @@ -317,7 +186,7 @@ static CGImageRef CreateCGImage8(avifImage * avif) { vImage_YpCbCrToARGB convInfo = {0}; - resultBufferData = calloc(components * rowBytes * avif->height, sizeof(uint8_t)); + resultBufferData = calloc(rowBytes * avif->height, sizeof(uint8_t)); if(resultBufferData == NULL) { goto end_all; } @@ -386,6 +255,7 @@ static CGImageRef CreateCGImage8(avifImage * avif) { uint8_t const permuteMap[4] = {0, 1, 2, 3}; switch(avif->yuvFormat) { case AVIF_PIXEL_FORMAT_NONE: + case AVIF_PIXEL_FORMAT_COUNT: NSLog(@"Invalid pixel format."); goto end_all; case AVIF_PIXEL_FORMAT_YUV420: @@ -567,51 +437,11 @@ static CGImageRef CreateCGImage8(avifImage * avif) { if(hasAlpha) { // alpha vImage_Buffer alphaBuffer = {0}; - if(avif->alphaRange == AVIF_RANGE_LIMITED) { - float* floatAlphaBufferData = NULL; - floatAlphaBufferData = calloc(avif->width * avif->height, sizeof(float)); - scaledAlphaBufferData = calloc(avif->width * avif->height, sizeof(uint8_t)); - if(floatAlphaBufferData == NULL || scaledAlphaBufferData == NULL) { - err = kvImageMemoryAllocationError; 
- goto end_prepare_alpha; - } - vImage_Buffer origAlphaBuffer = { - .data = avif->alphaPlane, - .width = avif->width, - .height = avif->height, - .rowBytes = avif->alphaRowBytes, - }; - vImage_Buffer floatAlphaBuffer = { - .data = floatAlphaBufferData, - .width = avif->width, - .height = avif->height, - .rowBytes = avif->width * sizeof(float), - }; - alphaBuffer.width = avif->width; - alphaBuffer.height = avif->height; - alphaBuffer.data = scaledAlphaBufferData; - alphaBuffer.rowBytes = avif->width * sizeof(uint8_t); - err = vImageConvert_Planar8toPlanarF(&origAlphaBuffer, &floatAlphaBuffer, 255.0f, 0.0f, kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to convert alpha planes from uint8 to float: %ld", err); - goto end_prepare_alpha; - } - err = vImageConvert_PlanarFtoPlanar8(&floatAlphaBuffer, &alphaBuffer, 235.0f, 16.0f, kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to convert alpha planes from float to uint8: %ld", err); - goto end_prepare_alpha; - } - end_prepare_alpha: - free(floatAlphaBufferData); - if(err != kvImageNoError) { - goto end_alpha; - } - } else { - alphaBuffer.width = avif->width; - alphaBuffer.height = avif->height; - alphaBuffer.data = avif->alphaPlane; - alphaBuffer.rowBytes = avif->alphaRowBytes; - } + // libavif 0.11.0: alphaRange field was removed from the avifImage struct. It it presumed that alpha plane is always full range. 
+ alphaBuffer.width = avif->width; + alphaBuffer.height = avif->height; + alphaBuffer.data = avif->alphaPlane; + alphaBuffer.rowBytes = avif->alphaRowBytes; if(monochrome) { // alpha_mono uint8_t* tmpBufferData = NULL; uint8_t* monoBufferData = NULL; @@ -704,11 +534,11 @@ static CGImageRef CreateCGImage8(avifImage * avif) { } end_all: - free(resultBufferData); - free(argbBufferData); - free(dummyCbData); - free(dummyCrData); - free(scaledAlphaBufferData); + if (resultBufferData) free(resultBufferData); + if (argbBufferData) free(argbBufferData); + if (dummyCbData) free(dummyCbData); + if (dummyCrData) free(dummyCrData); + if (scaledAlphaBufferData) free(scaledAlphaBufferData); return result; } @@ -861,22 +691,13 @@ static CGImageRef CreateCGImage16U(avifImage * avif) { }; float offset = 0.0f; float rangeMax = 0.0f; + // libavif 0.11.0: alphaRange field was removed from the avifImage struct. It it presumed that alpha plane is always full range. if(avif->depth == 10) { - if(avif->alphaRange == AVIF_RANGE_LIMITED) { - offset = 64.0f; - rangeMax = 940.0f; - } else { - offset = 0.0f; - rangeMax = 1023.0f; - } + offset = 0.0f; + rangeMax = 1023.0f; } else if(avif->depth == 12) { - if(avif->alphaRange == AVIF_RANGE_LIMITED) { - offset = 256.0f; - rangeMax = 3760.0f; - } else { - offset = 0.0f; - rangeMax = 4095.0f; - } + offset = 0.0f; + rangeMax = 4095.0f; } float const scale = (float)(rangeMax - offset) / 65535.0f; err = vImageConvert_16UToF(&origAlpha, &floatAlphaBuffer, 0.0f, 1.0f, kvImageNoFlags); @@ -919,6 +740,7 @@ static CGImageRef CreateCGImage16U(avifImage * avif) { uint8_t const permuteMap[4] = {0, 1, 2, 3}; switch(avif->yuvFormat) { case AVIF_PIXEL_FORMAT_NONE: + case AVIF_PIXEL_FORMAT_COUNT: NSLog(@"Invalid pixel format."); goto end_all; case AVIF_PIXEL_FORMAT_YUV420: @@ -1129,6 +951,7 @@ static CGImageRef CreateCGImage16U(avifImage * avif) { err = vImageConvert_Planar8toARGB8888(&alphaBuffer1, &alphaBuffer2, &monoBuffer1, &monoBuffer2, &resultBuffer, 
kvImageNoFlags); if(err != kvImageNoError) { free(resultBufferData); + resultBufferData = NULL; NSLog(@"Failed to convert Planar Alpha + Mono to MonoA: %ld", err); goto end_alpha_mono; } diff --git a/SDWebImageAVIFCoder/Classes/Public/SDImageAVIFCoder.h b/SDWebImageAVIFCoder/Classes/Public/SDImageAVIFCoder.h index b146fd1..79a84df 100644 --- a/SDWebImageAVIFCoder/Classes/Public/SDImageAVIFCoder.h +++ b/SDWebImageAVIFCoder/Classes/Public/SDImageAVIFCoder.h @@ -13,6 +13,12 @@ static const SDImageFormat SDImageFormatAVIF = 15; // AV1-codec based HEIF +/// A `avifCodecChoice` enum which specify the custom codec for AVIF decoding, defaults to 0 (`AVIF_CODEC_CHOICE_AUTO`) +FOUNDATION_EXPORT SDImageCoderOption _Nonnull const SDImageCoderAVIFDecodeCodecChoice; + +/// A `avifCodecChoice` enum which specify the custom codec for AVIF encoding, defaults to 0 (`AVIF_CODEC_CHOICE_AUTO`) +FOUNDATION_EXPORT SDImageCoderOption _Nonnull const SDImageCoderAVIFEncodeCodecChoice; + /// Supports AVIF static image and AVIFS animated image @interface SDImageAVIFCoder : NSObject diff --git a/SDWebImageAVIFCoder/Classes/SDImageAVIFCoder.m b/SDWebImageAVIFCoder/Classes/SDImageAVIFCoder.m index 4ef1d67..d413328 100644 --- a/SDWebImageAVIFCoder/Classes/SDImageAVIFCoder.m +++ b/SDWebImageAVIFCoder/Classes/SDImageAVIFCoder.m @@ -9,11 +9,11 @@ #import #import #import -#if __has_include() +#if __has_include() && __has_include() #import #import #else -#import "avif/avifs.h" +#import "avif/avif.h" #import "avif/internal.h" #endif @@ -61,13 +61,19 @@ #endif #endif +SDImageCoderOption _Nonnull const SDImageCoderAVIFDecodeCodecChoice = @"avifDecodeCodecChoice"; +SDImageCoderOption _Nonnull const SDImageCoderAVIFEncodeCodecChoice = @"avifEncodeCodecChoice"; + @implementation SDImageAVIFCoder { avifDecoder *_decoder; NSData *_imageData; CGFloat _scale; NSUInteger _loopCount; NSUInteger _frameCount; + BOOL _hasAnimation; SD_LOCK_DECLARE(_lock); + BOOL _preserveAspectRatio; + CGSize _thumbnailSize; } - 
(void)dealloc { @@ -93,17 +99,43 @@ - (UIImage *)decodedImageWithData:(NSData *)data options:(SDImageCoderOptions *) if (!data) { return nil; } + BOOL decodeFirstFrame = [options[SDImageCoderDecodeFirstFrameOnly] boolValue]; CGFloat scale = 1; - if ([options valueForKey:SDImageCoderDecodeScaleFactor]) { - scale = [[options valueForKey:SDImageCoderDecodeScaleFactor] doubleValue]; + NSNumber *scaleFactor = options[SDImageCoderDecodeScaleFactor]; + if (scaleFactor != nil) { + scale = [scaleFactor doubleValue]; if (scale < 1) { scale = 1; } } + CGSize thumbnailSize = CGSizeZero; + NSValue *thumbnailSizeValue = options[SDImageCoderDecodeThumbnailPixelSize]; + if (thumbnailSizeValue != nil) { +#if SD_MAC + thumbnailSize = thumbnailSizeValue.sizeValue; +#else + thumbnailSize = thumbnailSizeValue.CGSizeValue; +#endif + } + + BOOL preserveAspectRatio = YES; + NSNumber *preserveAspectRatioValue = options[SDImageCoderDecodePreserveAspectRatio]; + if (preserveAspectRatioValue != nil) { + preserveAspectRatio = preserveAspectRatioValue.boolValue; + } + + avifCodecChoice codecChoice = AVIF_CODEC_CHOICE_AUTO; + NSNumber *codecChoiceValue = options[SDImageCoderAVIFDecodeCodecChoice]; + if (codecChoiceValue != nil) { + codecChoice = [codecChoiceValue intValue]; + } + // Decode it avifDecoder * decoder = avifDecoderCreate(); avifDecoderSetIOMemory(decoder, data.bytes, data.length); + decoder->maxThreads = 2; + decoder->codecChoice = codecChoice; // Disable strict mode to keep some AVIF image compatible decoder->strictFlags = AVIF_STRICT_DISABLED; avifResult decodeResult = avifDecoderParse(decoder); @@ -113,16 +145,29 @@ - (UIImage *)decodedImageWithData:(NSData *)data options:(SDImageCoderOptions *) return nil; } + BOOL hasAnimation = decoder->imageCount > 1; + uint32_t width = decoder->image->width; + uint32_t height = decoder->image->height; + CGSize scaledSize = [SDImageCoderHelper scaledSizeWithImageSize:CGSizeMake(width, height) scaleSize:thumbnailSize 
preserveAspectRatio:preserveAspectRatio shouldScaleUp:NO]; + // Static image - if (decoder->imageCount <= 1) { + if (!hasAnimation || decodeFirstFrame) { avifResult nextImageResult = avifDecoderNextImage(decoder); if (nextImageResult != AVIF_RESULT_OK) { NSLog(@"Failed to decode image: %s", avifResultToString(nextImageResult)); avifDecoderDestroy(decoder); return nil; } - CGImageRef imageRef = SDCreateCGImageFromAVIF(decoder->image); + CGImageRef originImageRef = SDCreateCGImageFromAVIF(decoder->image); + if (!originImageRef) { + avifDecoderDestroy(decoder); + return nil; + } + // TODO: optimization using vImageScale directly during transform + CGImageRef imageRef = [SDImageCoderHelper CGImageCreateScaled:originImageRef size:scaledSize]; + CGImageRelease(originImageRef); if (!imageRef) { + avifDecoderDestroy(decoder); return nil; } #if SD_MAC @@ -131,6 +176,7 @@ - (UIImage *)decodedImageWithData:(NSData *)data options:(SDImageCoderOptions *) UIImage *image = [[UIImage alloc] initWithCGImage:imageRef scale:scale orientation:UIImageOrientationUp]; #endif CGImageRelease(imageRef); + avifDecoderDestroy(decoder); return image; } @@ -138,7 +184,13 @@ - (UIImage *)decodedImageWithData:(NSData *)data options:(SDImageCoderOptions *) NSMutableArray *frames = [NSMutableArray array]; while (avifDecoderNextImage(decoder) == AVIF_RESULT_OK) { @autoreleasepool { - CGImageRef imageRef = SDCreateCGImageFromAVIF(decoder->image); + CGImageRef originImageRef = SDCreateCGImageFromAVIF(decoder->image); + if (!originImageRef) { + continue; + } + // TODO: optimization using vImageScale directly during transform + CGImageRef imageRef = [SDImageCoderHelper CGImageCreateScaled:originImageRef size:scaledSize]; + CGImageRelease(originImageRef); if (!imageRef) { continue; } @@ -147,6 +199,7 @@ - (UIImage *)decodedImageWithData:(NSData *)data options:(SDImageCoderOptions *) #else UIImage *image = [[UIImage alloc] initWithCGImage:imageRef scale:scale orientation:UIImageOrientationUp]; #endif + 
CGImageRelease(imageRef); NSTimeInterval duration = decoder->imageTiming.duration; // Should use `decoder->imageTiming`, not the `decoder->duration`, see libavif source code SDImageFrame *frame = [SDImageFrame frameWithImage:image duration:duration]; [frames addObject:frame]; @@ -208,12 +261,14 @@ - (nullable NSData *)encodedDataWithImage:(nullable UIImage *)image format:(SDIm vImage_Buffer src; v_error = vImageBuffer_InitWithCGImage(&src, &srcFormat, NULL, imageRef, kvImageNoFlags); if (v_error != kvImageNoError) { + vImageConverter_Release(convertor); return nil; } vImage_Buffer dest; - vImageBuffer_Init(&dest, height, width, hasAlpha ? 32 : 24, kvImageNoFlags); - if (!dest.data) { - free(src.data); + v_error = vImageBuffer_Init(&dest, height, width, hasAlpha ? 32 : 24, kvImageNoFlags); + if (v_error != kvImageNoError) { + if (src.data) free(src.data); + vImageConverter_Release(convertor); return nil; } @@ -222,15 +277,21 @@ - (nullable NSData *)encodedDataWithImage:(nullable UIImage *)image format:(SDIm free(src.data); vImageConverter_Release(convertor); if (v_error != kvImageNoError) { - free(dest.data); + if(dest.data) free(dest.data); return nil; } + avifCodecChoice codecChoice = AVIF_CODEC_CHOICE_AUTO; + NSNumber *codecChoiceValue = options[SDImageCoderAVIFEncodeCodecChoice]; + if (codecChoiceValue != nil) { + codecChoice = [codecChoiceValue intValue]; + } + avifPixelFormat avifFormat = AVIF_PIXEL_FORMAT_YUV444; avifImage *avif = avifImageCreate((int)width, (int)height, 8, avifFormat); if (!avif) { - free(dest.data); + if (dest.data) free(dest.data); return nil; } avifRGBImage rgb = { @@ -243,7 +304,6 @@ - (nullable NSData *)encodedDataWithImage:(nullable UIImage *)image format:(SDIm }; avifImageRGBToYUV(avif, &rgb); free(dest.data); - dest.data = NULL; NSData *iccProfile = (__bridge_transfer NSData *)CGColorSpaceCopyICCProfile([SDImageCoderHelper colorSpaceGetDeviceRGB]); @@ -257,19 +317,23 @@ - (nullable NSData *)encodedDataWithImage:(nullable UIImage 
*)image format:(SDIm avifRWData raw = AVIF_DATA_EMPTY; avifEncoder *encoder = avifEncoderCreate(); + encoder->codecChoice = codecChoice; encoder->minQuantizer = rescaledQuality; encoder->maxQuantizer = rescaledQuality; + encoder->minQuantizerAlpha = rescaledQuality; + encoder->maxQuantizerAlpha = rescaledQuality; encoder->maxThreads = 2; avifResult result = avifEncoderWrite(encoder, avif, &raw); + avifImageDestroy(avif); + avifEncoderDestroy(encoder); if (result != AVIF_RESULT_OK) { - avifEncoderDestroy(encoder); + if (raw.data) avifRWDataFree(&raw); return nil; } NSData *imageData = [NSData dataWithBytes:raw.data length:raw.size]; - free(raw.data); - avifEncoderDestroy(encoder); + avifRWDataFree(&raw); return imageData; } @@ -278,8 +342,15 @@ - (nullable NSData *)encodedDataWithImage:(nullable UIImage *)image format:(SDIm - (instancetype)initWithAnimatedImageData:(NSData *)data options:(SDImageCoderOptions *)options { self = [super init]; if (self) { + avifCodecChoice codecChoice = AVIF_CODEC_CHOICE_AUTO; + NSNumber *codecChoiceValue = options[SDImageCoderAVIFDecodeCodecChoice]; + if (codecChoiceValue != nil) { + codecChoice = [codecChoiceValue intValue]; + } avifDecoder *decoder = avifDecoderCreate(); avifDecoderSetIOMemory(decoder, data.bytes, data.length); + decoder->maxThreads = 2; + decoder->codecChoice = codecChoice; // Disable strict mode to keep some AVIF image compatible decoder->strictFlags = AVIF_STRICT_DISABLED; avifResult decodeResult = avifDecoderParse(decoder); @@ -291,6 +362,7 @@ - (instancetype)initWithAnimatedImageData:(NSData *)data options:(SDImageCoderOp // TODO: Optimize the performance like WebPCoder (frame meta cache, etc) _frameCount = decoder->imageCount; _loopCount = 0; + _hasAnimation = decoder->imageCount > 1; CGFloat scale = 1; NSNumber *scaleFactor = options[SDImageCoderDecodeScaleFactor]; if (scaleFactor != nil) { @@ -300,6 +372,22 @@ - (instancetype)initWithAnimatedImageData:(NSData *)data options:(SDImageCoderOp } } _scale = 
scale; + CGSize thumbnailSize = CGSizeZero; + NSValue *thumbnailSizeValue = options[SDImageCoderDecodeThumbnailPixelSize]; + if (thumbnailSizeValue != nil) { + #if SD_MAC + thumbnailSize = thumbnailSizeValue.sizeValue; + #else + thumbnailSize = thumbnailSizeValue.CGSizeValue; + #endif + } + _thumbnailSize = thumbnailSize; + BOOL preserveAspectRatio = YES; + NSNumber *preserveAspectRatioValue = options[SDImageCoderDecodePreserveAspectRatio]; + if (preserveAspectRatioValue != nil) { + preserveAspectRatio = preserveAspectRatioValue.boolValue; + } + _preserveAspectRatio = preserveAspectRatio; _decoder = decoder; _imageData = data; SD_LOCK_INIT(_lock); @@ -340,13 +428,25 @@ - (UIImage *)animatedImageFrameAtIndex:(NSUInteger)index { if (index >= _frameCount) { return nil; } + uint32_t width = 0; + uint32_t height = 0; SD_LOCK(_lock); avifResult decodeResult = avifDecoderNthImage(_decoder, (uint32_t)index); if (decodeResult != AVIF_RESULT_OK) { + SD_UNLOCK(_lock); return nil; } - CGImageRef imageRef = SDCreateCGImageFromAVIF(_decoder->image); + width = _decoder->image->width; + height = _decoder->image->height; + CGImageRef originImageRef = SDCreateCGImageFromAVIF(_decoder->image); SD_UNLOCK(_lock); + if (!originImageRef) { + return nil; + } + CGSize scaledSize = [SDImageCoderHelper scaledSizeWithImageSize:CGSizeMake(width, height) scaleSize:_thumbnailSize preserveAspectRatio:_preserveAspectRatio shouldScaleUp:NO]; + // TODO: optimization using vImageScale directly during transform + CGImageRef imageRef = [SDImageCoderHelper CGImageCreateScaled:originImageRef size:scaledSize]; + CGImageRelease(originImageRef); if (!imageRef) { return nil; } diff --git a/SDWebImageAVIFCoder/Module/Info.plist b/SDWebImageAVIFCoder/Module/Info.plist index a8f353a..e10219e 100644 --- a/SDWebImageAVIFCoder/Module/Info.plist +++ b/SDWebImageAVIFCoder/Module/Info.plist @@ -15,8 +15,8 @@ CFBundlePackageType FMWK CFBundleShortVersionString - 0.9.0 + 0.11.1 CFBundleVersion - 0.9.0 + 0.11.1 diff 
--git a/_Pods.xcodeproj b/_Pods.xcodeproj deleted file mode 120000 index 3c5a8e7..0000000 --- a/_Pods.xcodeproj +++ /dev/null @@ -1 +0,0 @@ -Example/Pods/Pods.xcodeproj \ No newline at end of file