diff --git a/Package.swift b/Package.swift index 3ea312104..f3ee95372 100644 --- a/Package.swift +++ b/Package.swift @@ -15,6 +15,14 @@ let package = Package( name: "SnapshotTesting", targets: ["SnapshotTesting"] ), + .library( + name: "SnapshotTestingPlugin", + targets: ["SnapshotTestingPlugin"] + ), + .library( + name: "ImageSerializationPlugin", + targets: ["ImageSerializationPlugin"] + ), .library( name: "InlineSnapshotTesting", targets: ["InlineSnapshotTesting"] @@ -25,7 +33,16 @@ let package = Package( ], targets: [ .target( - name: "SnapshotTesting" + name: "SnapshotTesting", + dependencies: [ + "ImageSerializationPlugin", + "SnapshotTestingPlugin" + ] + ), + .target(name: "SnapshotTestingPlugin"), + .target( + name: "ImageSerializationPlugin", + dependencies: ["SnapshotTestingPlugin"] ), .target( + name: "InlineSnapshotTesting", diff --git a/README.md b/README.md index 5782f945a..eec2a07d3 100644 --- a/README.md +++ b/README.md @@ -230,7 +230,7 @@ targets: [ [available-strategies]: https://swiftpackageindex.com/pointfreeco/swift-snapshot-testing/main/documentation/snapshottesting/snapshotting [defining-strategies]: https://swiftpackageindex.com/pointfreeco/swift-snapshot-testing/main/documentation/snapshottesting/customstrategies -## Plug-ins +## Strategies / Plug-ins - [AccessibilitySnapshot](https://github.com/cashapp/AccessibilitySnapshot) adds easy regression testing for iOS accessibility. @@ -273,6 +273,18 @@ targets: [ - [SnapshotVision](https://github.com/gregersson/swift-snapshot-testing-vision) adds snapshot strategy for text recognition on views and images. Uses Apples Vision framework. + - [Image Serialization Plugin - HEIC](https://github.com/mackoj/swift-snapshot-testing-plugin-heic) allows any + strategy that produces an image to store it in the `.heic` format, which reduces file sizes + in comparison to PNG. + + - [Image Serialization Plugin - WEBP](https://github.com/mackoj/swift-snapshot-testing-plugin-webp) allows any + strategy that produces an image to store it in the `.webp` format, which reduces file sizes + in comparison to PNG. + + - [Image Serialization Plugin - JXL](https://github.com/mackoj/swift-snapshot-testing-plugin-jxl) allows any + strategy that produces an image to store it in the `.jxl` format, which reduces file sizes + in comparison to PNG. + Have you written your own SnapshotTesting plug-in? [Add it here](https://github.com/pointfreeco/swift-snapshot-testing/edit/master/README.md) and submit a pull request! diff --git a/Sources/ImageSerializationPlugin/ImageSerializationPlugin.swift b/Sources/ImageSerializationPlugin/ImageSerializationPlugin.swift new file mode 100644 index 000000000..08508a9e8 --- /dev/null +++ b/Sources/ImageSerializationPlugin/ImageSerializationPlugin.swift @@ -0,0 +1,87 @@ +#if canImport(SwiftUI) +import Foundation +import SnapshotTestingPlugin + +#if canImport(UIKit) +import UIKit.UIImage +/// A type alias for `UIImage` when UIKit is available. +public typealias SnapImage = UIImage +#elseif canImport(AppKit) +import AppKit.NSImage +/// A type alias for `NSImage` when AppKit is available. +public typealias SnapImage = NSImage +#endif + +/// A type alias that combines `ImageSerialization` and `SnapshotTestingPlugin` protocols. +/// +/// `ImageSerializationPlugin` is a convenient alias used to conform to both `ImageSerialization` and `SnapshotTestingPlugin` protocols.
+/// This allows for image serialization plugins that also support snapshot testing, leveraging the Objective-C runtime while maintaining image serialization capabilities. +public typealias ImageSerializationPlugin = ImageSerialization & SnapshotTestingPlugin + +// TODO: async throws will be added later to encodeImage and decodeImage +/// A protocol that defines methods for encoding and decoding images in various formats. +/// +/// The `ImageSerialization` protocol is intended for classes that provide functionality to serialize (encode) and deserialize (decode) images. +/// Implementing this protocol allows a class to specify the image format it supports and to handle image data conversions. +/// This protocol is designed to be used in environments where SwiftUI is available and supports platform-specific image types via `SnapImage`. +public protocol ImageSerialization { + + /// The image format that the serialization plugin supports. + /// + /// Each conforming class must specify the format it handles, using the `ImageSerializationFormat` enum. This property helps the `ImageSerializer` + /// determine which plugin to use for a given format during image encoding and decoding. + static var imageFormat: ImageSerializationFormat { get } + + /// Encodes a `SnapImage` into a data representation. + /// + /// This method converts the provided image into the appropriate data format. It may eventually support asynchronous operations and error handling using `async throws`. + /// + /// - Parameter image: The image to be encoded. + /// - Returns: The encoded image data, or `nil` if encoding fails. + func encodeImage(_ image: SnapImage) -> Data? + + /// Decodes image data into a `SnapImage`. + /// + /// This method converts the provided data back into an image. It may eventually support asynchronous operations and error handling using `async throws`. + /// + /// - Parameter data: The image data to be decoded. + /// - Returns: The decoded image, or `nil` if decoding fails. + func decodeImage(_ data: Data) -> SnapImage? +} +#endif + +/// An enumeration that defines the image formats supported by the `ImageSerialization` protocol. +/// +/// The `ImageSerializationFormat` enum is used to represent various image formats. It includes a predefined case for PNG images and a flexible case for plugins, +/// allowing for the extension of formats via plugins identified by unique string values. +public enum ImageSerializationFormat: RawRepresentable, Sendable, Equatable { + + public static let defaultValue: ImageSerializationFormat = .png + + /// Represents the default image format aka PNG. + case png + + /// Represents a custom image format provided by a plugin. + /// + /// This case allows for the extension of image formats beyond the predefined ones by using a unique string identifier. + case plugins(String) + + /// Initializes an `ImageSerializationFormat` instance from a raw string value. + /// + /// This initializer converts a string value into an appropriate `ImageSerializationFormat` case. + /// + /// - Parameter rawValue: The string representation of the image format. + public init?(rawValue: String) { + self = rawValue == "png" ? .png : .plugins(rawValue) + } + + /// The raw string value of the `ImageSerializationFormat`. + /// + /// This computed property returns the string representation of the current image format. 
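+ /// For example, `ImageSerializationFormat.png.rawValue` is `"png"`, and `ImageSerializationFormat.plugins("heic").rawValue` is `"heic"`.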
+ public var rawValue: String { + switch self { + case .png: return "png" + case let .plugins(value): return value + } + } +} diff --git a/Sources/SnapshotTesting/AssertSnapshot.swift b/Sources/SnapshotTesting/AssertSnapshot.swift index fefe1e6f7..51dde150e 100644 --- a/Sources/SnapshotTesting/AssertSnapshot.swift +++ b/Sources/SnapshotTesting/AssertSnapshot.swift @@ -1,9 +1,41 @@ import XCTest +import ImageSerializationPlugin #if canImport(Testing) import Testing #endif +/// Whether or not to change the default output image format to something else. +public var imageFormat: ImageSerializationFormat { + get { + _imageFormat + } + set { _imageFormat = newValue } +} + +@_spi(Internals) +public var _imageFormat: ImageSerializationFormat { + get { +#if canImport(Testing) + if let test = Test.current { + for trait in test.traits.reversed() { + if let diffTool = (trait as? _SnapshotsTestTrait)?.configuration.imageFormat { + return diffTool + } + } + } +#endif + return __imageFormat + } + set { + __imageFormat = newValue + } +} + +@_spi(Internals) +public var __imageFormat: ImageSerializationFormat = .defaultValue + + /// Enhances failure messages with a command line diff tool expression that can be copied and pasted /// into a terminal. @available( diff --git a/Sources/SnapshotTesting/Documentation.docc/Articles/ImageSerializationPlugin.md b/Sources/SnapshotTesting/Documentation.docc/Articles/ImageSerializationPlugin.md new file mode 100644 index 000000000..a29aa9fac --- /dev/null +++ b/Sources/SnapshotTesting/Documentation.docc/Articles/ImageSerializationPlugin.md @@ -0,0 +1,86 @@ +# Image Serialization Plugin + +The **Image Serialization Plugin** extends the functionality of the SnapshotTesting library by enabling support for multiple image formats through a plugin architecture. This PluginAPI allows image encoding and decoding to be easily extended without modifying the core logic of the system. + +## Overview + +The **Image Serialization Plugin** provides an interface for encoding and decoding images in various formats. By conforming to both the `ImageSerialization` and `SnapshotTestingPlugin` protocols, it integrates with the broader plugin system, allowing for the seamless addition of new image formats. The default implementation supports PNG, but this architecture allows users to define custom plugins for other formats. + +### Image Serialization Plugin Architecture + +The **Image Serialization Plugin** relies on the PluginAPI that is a combination of protocols and a centralized registry to manage and discover plugins. The architecture allows for dynamic registration of image serialization plugins, which can be automatically discovered at runtime using the Objective-C runtime. This makes the system highly extensible, with plugins being automatically registered without the need for manual intervention. + +#### Key Components: + +1. **`ImageSerialization` Protocol**: + - Defines the core methods for encoding and decoding images. + - Requires plugins to specify the image format they support using the `ImageSerializationFormat` enum. + - Provides methods for encoding (`encodeImage`) and decoding (`decodeImage`) images. + +2. **`ImageSerializationFormat` Enum**: + - Represents supported image formats. + - Includes predefined formats such as `.png` and extensible formats through the `.plugins(String)` case, allowing for custom formats to be introduced via plugins. + +3. **`ImageSerializer` Class**: + - Responsible for encoding and decoding images using the registered plugins. 
- Retrieves available plugins from the `PluginRegistry` and uses the first matching plugin for the requested image format. + - Provides default implementations for PNG encoding and decoding if no plugin is available for a given format. + +#### Example Plugin Flow: + +1. **Plugin Discovery**: +- On Apple platforms, plugins are automatically discovered at runtime through the Objective-C runtime, which identifies classes that conform to both the `ImageSerialization` and `SnapshotTestingPlugin` protocols. + +2. **Plugin Registration**: +- Each plugin registers itself with the `PluginRegistry`, allowing it to be retrieved when needed for image serialization. + +3. **Image Encoding/Decoding**: +- When an image needs to be serialized, the `ImageSerializer` checks the available plugins for one that supports the requested format. +- If no plugin is found, it defaults to the built-in PNG encoding/decoding methods. + +#### Extensibility + +The plugin architecture allows developers to introduce new image formats without modifying the core SnapshotTesting library. By creating a new plugin that conforms to `ImageSerializationPlugin`, you can easily add support for additional image formats. + +Here are a few example plugins demonstrating how to extend the library with new image formats: + +- **[Image Serialization Plugin - HEIC](https://github.com/mackoj/swift-snapshot-testing-plugin-heic)**: Enables storing images in the `.heic` format, which reduces file sizes compared to PNG. +- **[Image Serialization Plugin - WEBP](https://github.com/mackoj/swift-snapshot-testing-plugin-webp)**: Allows storing images in the `.webp` format, which offers better compression than PNG. +- **[Image Serialization Plugin - JXL](https://github.com/mackoj/swift-snapshot-testing-plugin-jxl)**: Facilitates storing images in the `.jxl` format, which provides superior compression and quality compared to PNG. + +## Usage + +For example, if you want to use JPEG XL as a new image format for your snapshots, you can follow these steps. This approach applies to any image format as long as you have a plugin that conforms to `ImageSerializationPlugin`. + +1. **Add the Dependency**: Include the appropriate image serialization plugin as a dependency in your `Package.swift` file. For JPEG XL, it would look like this: + + ```swift + .package(url: "https://github.com/mackoj/swift-snapshot-testing-plugin-jxl.git", revision: "0.0.1"), + ``` + +2. **Link to Your Test Target**: Add the image serialization plugin to your test target's dependencies: + + ```swift + .product(name: "JXLImageSerializer", package: "swift-snapshot-testing-plugin-jxl"), + ``` + +3. **Import and Set Up**: In your test file, import the serializer and configure the image format in the `setUp()` method: + + ```swift + import JXLImageSerializer + + override class func setUp() { + SnapshotTesting.imageFormat = JXLImageSerializer.imageFormat + } + ``` + +> [!IMPORTANT] +> On **non**-Apple platforms you will need to call `PluginRegistry.registerPlugin(YourPlugin.init())` to register the plugin. + + Alternatively, you can specify the image format for individual assertions: + + ```swift + assertSnapshot(of: label, as: .image(precision: 0.9, imageFormat: JXLImageSerializer.imageFormat)) + ``` + +This setup demonstrates how to integrate a specific image format plugin. Replace `JXLImageSerializer` with the appropriate plugin and format for other image formats.
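+
+### Authoring a plugin
+
+To provide a format of your own, conform a class to `ImageSerializationPlugin`. The sketch below is illustrative only: the `"myformat"` name, the class name, and the PNG fallback stand in for a real codec.
+
+```swift
+import Foundation
+import ImageSerializationPlugin
+#if canImport(UIKit)
+import UIKit
+#elseif canImport(AppKit)
+import AppKit
+#endif
+
+public class MyFormatImageSerializer: NSObject, ImageSerializationPlugin {
+  // The format this plugin handles; also used as the snapshot file extension.
+  public static var imageFormat: ImageSerializationFormat = .plugins("myformat")
+  // Unique identifier used by the PluginRegistry.
+  public static var identifier: String = "ImageSerializationPlugin.MyFormatImageSerializer.myformat"
+
+  public required override init() { super.init() }
+
+  public func encodeImage(_ image: SnapImage) -> Data? {
+    // A real plugin would call into its codec here; this sketch falls back to PNG data.
+    #if canImport(UIKit)
+    return image.pngData()
+    #elseif canImport(AppKit)
+    guard let cgImage = image.cgImage(forProposedRect: nil, context: nil, hints: nil) else { return nil }
+    return NSBitmapImageRep(cgImage: cgImage).representation(using: .png, properties: [:])
+    #endif
+  }
+
+  public func decodeImage(_ data: Data) -> SnapImage? {
+    #if canImport(UIKit)
+    return UIImage(data: data)
+    #elseif canImport(AppKit)
+    return NSImage(data: data)
+    #endif
+  }
+}
+```
+
+Because plugins are discovered through the Objective-C runtime on Apple platforms, linking the plugin into the test target is enough; on other platforms, register it manually as noted above.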
diff --git a/Sources/SnapshotTesting/Documentation.docc/Articles/Plugins.md b/Sources/SnapshotTesting/Documentation.docc/Articles/Plugins.md new file mode 100644 index 000000000..529e2fcd2 --- /dev/null +++ b/Sources/SnapshotTesting/Documentation.docc/Articles/Plugins.md @@ -0,0 +1,22 @@ +# Plugins + +SnapshotTesting offers a wide range of built-in snapshot strategies, and over the years, third-party developers have introduced new ones. However, when there’s a need for functionality that spans multiple strategies, plugins become essential. + +## Overview + +Plugins provide greater flexibility and extensibility by enabling shared behavior across different strategies without the need to duplicate code or modify each strategy individually. They can be dynamically discovered, registered, and executed at runtime, making them ideal for adding new functionality without altering the core system. This architecture promotes modularity and decoupling, allowing features to be easily added or swapped out without impacting existing functionality. + +### Plugin architecture + +The plugin architecture is designed around the concept of **dynamic discovery and registration**. Plugins conform to specific protocols, such as `SnapshotTestingPlugin`, and are registered automatically by the `PluginRegistry`. This registry manages plugin instances, allowing them to be retrieved by identifier or filtered by the protocols they conform to. + +The primary components of the plugin system include: + +- **Plugin Protocols**: Define the behavior that plugins must implement. +- **PluginRegistry**: Manages plugin discovery, registration, and retrieval. +- **Objective-C Runtime Integration**: Allows automatic discovery of plugins that conform to specific protocols. + +> [!IMPORTANT] +> On **non** Apple platform you will need to call `PluginRegistry.registerPlugin(YourPlugin.init())` to register your plugin. + +The `PluginRegistry` is a singleton that registers plugins during its initialization. Plugins can be retrieved by their identifier or cast to specific types, allowing flexible interaction. diff --git a/Sources/SnapshotTesting/Documentation.docc/SnapshotTesting.md b/Sources/SnapshotTesting/Documentation.docc/SnapshotTesting.md index 8704d920d..801197e14 100644 --- a/Sources/SnapshotTesting/Documentation.docc/SnapshotTesting.md +++ b/Sources/SnapshotTesting/Documentation.docc/SnapshotTesting.md @@ -23,6 +23,11 @@ Powerfully flexible snapshot testing. - ``withSnapshotTesting(record:diffTool:operation:)-2kuyr`` - ``SnapshotTestingConfiguration`` +### Plugins + +- +- + ### Deprecations - diff --git a/Sources/SnapshotTesting/Plugins/ImageSerializer.swift b/Sources/SnapshotTesting/Plugins/ImageSerializer.swift new file mode 100644 index 000000000..2e80a36e7 --- /dev/null +++ b/Sources/SnapshotTesting/Plugins/ImageSerializer.swift @@ -0,0 +1,97 @@ +#if canImport(SwiftUI) +import Foundation +import ImageSerializationPlugin + +#if canImport(UIKit) +import UIKit +#elseif canImport(AppKit) +import AppKit +#endif + +/// A class responsible for encoding and decoding images using various image serialization plugins. +/// +/// The `ImageSerializer` class leverages plugins that conform to the `ImageSerialization` protocol to encode and decode images in different formats. +/// It automatically retrieves all available image serialization plugins from the `PluginRegistry` and uses them based on the specified `ImageSerializationFormat`. +/// If no plugin is found for the requested format, it defaults to using PNG encoding/decoding. 
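+///
+/// A minimal usage sketch (for illustration; the `"heic"` format assumes a matching plugin has been registered):
+///
+/// ```swift
+/// let serializer = ImageSerializer()
+/// if let data = serializer.encodeImage(image, imageFormat: .plugins("heic")) {
+///   let roundTripped = serializer.decodeImage(data, imageFormat: .plugins("heic"))
+/// }
+/// ```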
+final class ImageSerializer { + + /// A collection of plugins that conform to the `ImageSerialization` protocol. + private let plugins: [ImageSerialization] + + init() { + self.plugins = PluginRegistry.allPlugins() + } + + // TODO: async throws will be added later + /// Encodes a given image into the specified image format using the appropriate plugin. + /// + /// This method attempts to encode the provided `SnapImage` into the desired format using the first plugin that supports the specified `ImageSerializationFormat`. + /// If no plugin is found for the format, it defaults to encoding the image as PNG. + /// + /// - Parameters: + /// - image: The `SnapImage` to encode. + /// - imageFormat: The format in which to encode the image. + /// - Returns: The encoded image data, or `nil` if encoding fails. + func encodeImage(_ image: SnapImage, imageFormat: ImageSerializationFormat = .defaultValue) -> Data? { + if let plugin = self.plugins.first(where: { type(of: $0).imageFormat == imageFormat }) { + return plugin.encodeImage(image) + } + + // Default to PNG + return encodePNG(image) + } + + // TODO: async throws will be added later + /// Decodes image data into a `SnapImage` using the appropriate plugin based on the specified image format. + /// + /// This method attempts to decode the provided data into a `SnapImage` using the first plugin that supports the specified `ImageSerializationFormat`. + /// If no plugin is found for the format, it defaults to decoding the data as PNG. + /// + /// - Parameters: + /// - data: The image data to decode. + /// - imageFormat: The format in which the image data is encoded. + /// - Returns: The decoded `SnapImage`, or `nil` if decoding fails. + func decodeImage(_ data: Data, imageFormat: ImageSerializationFormat = .defaultValue) -> SnapImage? { + if let plugin = self.plugins.first(where: { type(of: $0).imageFormat == imageFormat }) { + return plugin.decodeImage(data) + } + + // Default to PNG + return decodePNG(data) + } + + // MARK: - Actual default Image Serializer + + /// Encodes a `SnapImage` as PNG data. + /// + /// This method provides a default implementation for encoding images as PNG. It is used as a fallback if no suitable plugin is found for the requested format. + /// + /// - Parameter image: The `SnapImage` to encode. + /// - Returns: The encoded PNG data, or `nil` if encoding fails. + private func encodePNG(_ image: SnapImage) -> Data? { +#if canImport(UIKit) + return image.pngData() +#elseif canImport(AppKit) + guard let cgImage = image.cgImage(forProposedRect: nil, context: nil, hints: nil) else { + return nil + } + let bitmapRep = NSBitmapImageRep(cgImage: cgImage) + return bitmapRep.representation(using: .png, properties: [:]) +#endif + } + + /// Decodes PNG data into a `SnapImage`. + /// + /// This method provides a default implementation for decoding PNG data into a `SnapImage`. It is used as a fallback if no suitable plugin is found for the requested format. + /// + /// - Parameter data: The PNG data to decode. + /// - Returns: The decoded `SnapImage`, or `nil` if decoding fails. + private func decodePNG(_ data: Data) -> SnapImage? 
{ +#if canImport(UIKit) + return UIImage(data: data) +#elseif canImport(AppKit) + return NSImage(data: data) +#endif + } +} +#endif diff --git a/Sources/SnapshotTesting/Plugins/PluginRegistry.swift b/Sources/SnapshotTesting/Plugins/PluginRegistry.swift new file mode 100644 index 000000000..5006a4154 --- /dev/null +++ b/Sources/SnapshotTesting/Plugins/PluginRegistry.swift @@ -0,0 +1,122 @@ +#if canImport(SwiftUI) +import Foundation +import SnapshotTestingPlugin + +/// A singleton class responsible for managing and registering plugins conforming to the `SnapshotTestingPlugin` protocol. +/// +/// The `PluginRegistry` automatically discovers and registers classes conforming to the `SnapshotTestingPlugin` protocol +/// within the Objective-C runtime. It allows retrieval of specific plugins by identifier, access to all registered plugins, +/// and filtering of plugins that conform to the `ImageSerialization` protocol. +public class PluginRegistry { + + /// Shared singleton instance of `PluginRegistry`. + private static let shared = PluginRegistry() + + /// Dictionary holding registered plugins, keyed by their identifier. + private var plugins: [String: AnyObject] = [:] + + /// Private initializer enforcing the singleton pattern. + /// + /// Automatically triggers `automaticPluginRegistration()` to discover and register plugins. + private init() { + #if canImport(ObjectiveC) + defer { automaticPluginRegistration() } + #else + print("Manual plugin registration is required. Call `PluginRegistry.registerPlugin(YourPlugin.init())`.") + #endif + } + + // MARK: - Internal Methods + + /// Registers a plugin. + /// + /// - Parameter plugin: An instance conforming to `SnapshotTestingPlugin`. + public static func registerPlugin(_ plugin: any SnapshotTestingPlugin) { + PluginRegistry.shared.registerPlugin(plugin) + } + + /// Retrieves a plugin by its identifier, casting it to the specified type. + /// + /// - Parameter identifier: The unique identifier for the plugin. + /// - Returns: The plugin instance cast to `Output` if found and castable, otherwise `nil`. + static func plugin(for identifier: String) -> Output? { + PluginRegistry.shared.plugin(for: identifier) + } + + /// Returns all registered plugins cast to the specified type. + /// + /// - Returns: An array of all registered plugins that can be cast to `Output`. + static func allPlugins() -> [Output] { + PluginRegistry.shared.allPlugins() + } + + // MARK: - Internal Methods + + /// Registers a plugin. + /// + /// - Parameter plugin: An instance conforming to `SnapshotTestingPlugin`. + private func registerPlugin(_ plugin: SnapshotTestingPlugin) { + plugins[type(of: plugin).identifier] = plugin + } + + /// Retrieves a plugin by its identifier, casting it to the specified type. + /// + /// - Parameter identifier: The unique identifier for the plugin. + /// - Returns: The plugin instance cast to `Output` if found and castable, otherwise `nil`. + private func plugin(for identifier: String) -> Output? { + return plugins[identifier] as? Output + } + + /// Returns all registered plugins cast to the specified type. + /// + /// - Returns: An array of all registered plugins that can be cast to `Output`. + private func allPlugins() -> [Output] { + return plugins.values.compactMap { $0 as? 
Output } + } + + // TEST-ONLY Reset Method + #if DEBUG + internal static func reset() { + shared.plugins.removeAll() + } + #endif + + #if DEBUG && canImport(ObjectiveC) + // Used for test only + internal static func automaticPluginRegistration() { + shared.automaticPluginRegistration() + } + #endif +} +#endif + +#if canImport(ObjectiveC) +import ObjectiveC.runtime + +extension PluginRegistry { + + /// Discovers and registers all classes conforming to the `SnapshotTestingPlugin` protocol. + /// + /// This method iterates over all Objective-C runtime classes, identifying those that conform to `SnapshotTestingPlugin`, + /// instantiating them, and registering them as plugins. + func automaticPluginRegistration() { + let classCount = objc_getClassList(nil, 0) + guard classCount > 0 else { return } + + let classes = UnsafeMutablePointer.allocate(capacity: Int(classCount)) + defer { classes.deallocate() } + + let autoreleasingClasses = AutoreleasingUnsafeMutablePointer(classes) + objc_getClassList(autoreleasingClasses, classCount) + + for i in 0..( record: SnapshotTestingConfiguration.Record? = nil, diffTool: SnapshotTestingConfiguration.DiffTool? = nil, + imageFormat: ImageSerializationFormat? = nil, operation: () throws -> R ) rethrows -> R { try SnapshotTestingConfiguration.$current.withValue( SnapshotTestingConfiguration( record: record ?? SnapshotTestingConfiguration.current?.record ?? _record, - diffTool: diffTool ?? SnapshotTestingConfiguration.current?.diffTool - ?? SnapshotTesting._diffTool + diffTool: diffTool ?? SnapshotTestingConfiguration.current?.diffTool ?? SnapshotTesting._diffTool, + imageFormat: imageFormat ?? SnapshotTestingConfiguration.current?.imageFormat ?? _imageFormat ) ) { try operation() @@ -41,16 +45,18 @@ public func withSnapshotTesting( /// Customizes `assertSnapshot` for the duration of an asynchronous operation. /// -/// See ``withSnapshotTesting(record:diffTool:operation:)-2kuyr`` for more information. +/// See ``withSnapshotTesting(record:diffTool:imageFormat:operation:)-2kuyr`` for more information. public func withSnapshotTesting( record: SnapshotTestingConfiguration.Record? = nil, diffTool: SnapshotTestingConfiguration.DiffTool? = nil, + imageFormat: ImageSerializationFormat? = nil, operation: () async throws -> R ) async rethrows -> R { try await SnapshotTestingConfiguration.$current.withValue( SnapshotTestingConfiguration( record: record ?? SnapshotTestingConfiguration.current?.record ?? _record, - diffTool: diffTool ?? SnapshotTestingConfiguration.current?.diffTool ?? _diffTool + diffTool: diffTool ?? SnapshotTestingConfiguration.current?.diffTool ?? _diffTool, + imageFormat: imageFormat ?? SnapshotTestingConfiguration.current?.imageFormat ?? _imageFormat ) ) { try await operation() @@ -71,13 +77,18 @@ public struct SnapshotTestingConfiguration: Sendable { /// /// See ``Record-swift.struct`` for more information. public var record: Record? + + /// The image format to use while encoding/decoding snapshot tests. + public var imageFormat: ImageSerializationFormat? public init( record: Record?, - diffTool: DiffTool? + diffTool: DiffTool?, + imageFormat: ImageSerializationFormat? ) { self.diffTool = diffTool self.record = record + self.imageFormat = imageFormat } /// The record mode of the snapshot test. 
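The `imageFormat` parameter added to `withSnapshotTesting` and `SnapshotTestingConfiguration` above can scope a non-default format to a block of assertions. A minimal sketch (the `.plugins("heic")` format and the `view` value are illustrative; the format assumes a matching registered plugin):

```swift
// Scope a custom image format to every assertion inside the operation.
withSnapshotTesting(imageFormat: .plugins("heic")) {
  assertSnapshot(of: view, as: .image)
}
```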
diff --git a/Sources/SnapshotTesting/SnapshotsTestTrait.swift b/Sources/SnapshotTesting/SnapshotsTestTrait.swift index 4b53db67c..b9559298b 100644 --- a/Sources/SnapshotTesting/SnapshotsTestTrait.swift +++ b/Sources/SnapshotTesting/SnapshotsTestTrait.swift @@ -1,4 +1,6 @@ #if canImport(Testing) + + import ImageSerializationPlugin import Testing extension Trait where Self == _SnapshotsTestTrait { @@ -9,12 +11,14 @@ /// - diffTool: The diff tool to use in failure messages. public static func snapshots( record: SnapshotTestingConfiguration.Record? = nil, - diffTool: SnapshotTestingConfiguration.DiffTool? = nil + diffTool: SnapshotTestingConfiguration.DiffTool? = nil, + imageFormat: ImageSerializationFormat? = nil ) -> Self { _SnapshotsTestTrait( configuration: SnapshotTestingConfiguration( record: record, - diffTool: diffTool + diffTool: diffTool, + imageFormat: imageFormat ) ) } diff --git a/Sources/SnapshotTesting/Snapshotting/CALayer.swift b/Sources/SnapshotTesting/Snapshotting/CALayer.swift index 74c512c12..5f4f4a1bf 100644 --- a/Sources/SnapshotTesting/Snapshotting/CALayer.swift +++ b/Sources/SnapshotTesting/Snapshotting/CALayer.swift @@ -1,3 +1,4 @@ +import ImageSerializationPlugin #if os(macOS) import AppKit import Cocoa @@ -14,7 +15,7 @@ /// assertSnapshot(of: layer, as: .image(precision: 0.99)) /// ``` public static var image: Snapshotting { - return .image(precision: 1) + return .image(precision: 1, imageFormat: imageFormat) } /// A snapshot strategy for comparing layers based on pixel equality. @@ -25,9 +26,9 @@ /// match. 98-99% mimics /// [the precision](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e) of the /// human eye. - public static func image(precision: Float, perceptualPrecision: Float = 1) -> Snapshotting { + public static func image(precision: Float, perceptualPrecision: Float = 1, imageFormat: ImageSerializationFormat) -> Snapshotting { return SimplySnapshotting.image( - precision: precision, perceptualPrecision: perceptualPrecision + precision: precision, perceptualPrecision: perceptualPrecision, imageFormat: imageFormat ).pullback { layer in let image = NSImage(size: layer.bounds.size) image.lockFocus() @@ -46,7 +47,7 @@ extension Snapshotting where Value == CALayer, Format == UIImage { /// A snapshot strategy for comparing layers based on pixel equality. public static var image: Snapshotting { - return .image() + return .image(imageFormat: imageFormat) } /// A snapshot strategy for comparing layers based on pixel equality. @@ -59,12 +60,12 @@ /// human eye. /// - traits: A trait collection override. 
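+ /// - imageFormat: The image format used to serialize the reference image on disk.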
public static func image( - precision: Float = 1, perceptualPrecision: Float = 1, traits: UITraitCollection = .init() + precision: Float = 1, perceptualPrecision: Float = 1, traits: UITraitCollection = .init(), imageFormat: ImageSerializationFormat ) -> Snapshotting { return SimplySnapshotting.image( - precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale + precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale, imageFormat: imageFormat ).pullback { layer in renderer(bounds: layer.bounds, for: traits).image { ctx in layer.setNeedsLayout() diff --git a/Sources/SnapshotTesting/Snapshotting/CGPath.swift b/Sources/SnapshotTesting/Snapshotting/CGPath.swift index 65470605c..368ab5196 100644 --- a/Sources/SnapshotTesting/Snapshotting/CGPath.swift +++ b/Sources/SnapshotTesting/Snapshotting/CGPath.swift @@ -1,4 +1,6 @@ +import ImageSerializationPlugin #if os(macOS) + import AppKit import Cocoa import CoreGraphics @@ -6,7 +8,7 @@ extension Snapshotting where Value == CGPath, Format == NSImage { /// A snapshot strategy for comparing bezier paths based on pixel equality. public static var image: Snapshotting { - return .image() + return .image(imageFormat: imageFormat) } /// A snapshot strategy for comparing bezier paths based on pixel equality. @@ -29,10 +31,11 @@ public static func image( precision: Float = 1, perceptualPrecision: Float = 1, - drawingMode: CGPathDrawingMode = .eoFill + drawingMode: CGPathDrawingMode = .eoFill, + imageFormat: ImageSerializationFormat ) -> Snapshotting { return SimplySnapshotting.image( - precision: precision, perceptualPrecision: perceptualPrecision + precision: precision, perceptualPrecision: perceptualPrecision, imageFormat: imageFormat ).pullback { path in let bounds = path.boundingBoxOfPath var transform = CGAffineTransform(translationX: -bounds.origin.x, y: -bounds.origin.y) @@ -52,10 +55,11 @@ #elseif os(iOS) || os(tvOS) import UIKit + extension Snapshotting where Value == CGPath, Format == UIImage { /// A snapshot strategy for comparing bezier paths based on pixel equality. public static var image: Snapshotting { - return .image() + return .image(imageFormat: imageFormat) } /// A snapshot strategy for comparing bezier paths based on pixel equality. @@ -68,10 +72,10 @@ /// human eye. public static func image( precision: Float = 1, perceptualPrecision: Float = 1, scale: CGFloat = 1, - drawingMode: CGPathDrawingMode = .eoFill + drawingMode: CGPathDrawingMode = .eoFill, imageFormat: ImageSerializationFormat ) -> Snapshotting { return SimplySnapshotting.image( - precision: precision, perceptualPrecision: perceptualPrecision, scale: scale + precision: precision, perceptualPrecision: perceptualPrecision, scale: scale, imageFormat: imageFormat ).pullback { path in let bounds = path.boundingBoxOfPath let format: UIGraphicsImageRendererFormat diff --git a/Sources/SnapshotTesting/Snapshotting/NSBezierPath.swift b/Sources/SnapshotTesting/Snapshotting/NSBezierPath.swift index b84a59bf3..8577ef296 100644 --- a/Sources/SnapshotTesting/Snapshotting/NSBezierPath.swift +++ b/Sources/SnapshotTesting/Snapshotting/NSBezierPath.swift @@ -1,11 +1,12 @@ #if os(macOS) import AppKit import Cocoa + import ImageSerializationPlugin extension Snapshotting where Value == NSBezierPath, Format == NSImage { /// A snapshot strategy for comparing bezier paths based on pixel equality. 
public static var image: Snapshotting { - return .image() + return .image(imageFormat: imageFormat) } /// A snapshot strategy for comparing bezier paths based on pixel equality. @@ -24,9 +25,9 @@ /// match. 98-99% mimics /// [the precision](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e) of the /// human eye. - public static func image(precision: Float = 1, perceptualPrecision: Float = 1) -> Snapshotting { + public static func image(precision: Float = 1, perceptualPrecision: Float = 1, imageFormat: ImageSerializationFormat) -> Snapshotting { return SimplySnapshotting.image( - precision: precision, perceptualPrecision: perceptualPrecision + precision: precision, perceptualPrecision: perceptualPrecision, imageFormat: imageFormat ).pullback { path in // Move path info frame: let bounds = path.bounds diff --git a/Sources/SnapshotTesting/Snapshotting/NSImage.swift b/Sources/SnapshotTesting/Snapshotting/NSImage.swift index be4fd7cd4..c8978e27c 100644 --- a/Sources/SnapshotTesting/Snapshotting/NSImage.swift +++ b/Sources/SnapshotTesting/Snapshotting/NSImage.swift @@ -1,10 +1,11 @@ #if os(macOS) import Cocoa import XCTest + import ImageSerializationPlugin extension Diffing where Value == NSImage { /// A pixel-diffing strategy for NSImage's which requires a 100% match. - public static let image = Diffing.image() + public static let image = Diffing.image(imageFormat: imageFormat) /// A pixel-diffing strategy for NSImage that allows customizing how precise the matching must be. /// @@ -15,14 +16,15 @@ /// [the precision](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e) of the /// human eye. /// - Returns: A new diffing strategy. - public static func image(precision: Float = 1, perceptualPrecision: Float = 1) -> Diffing { + public static func image(precision: Float = 1, perceptualPrecision: Float = 1, imageFormat: ImageSerializationFormat = imageFormat) -> Diffing { + let imageSerializer = ImageSerializer() return .init( - toData: { NSImagePNGRepresentation($0)! }, - fromData: { NSImage(data: $0)! } + toData: { imageSerializer.encodeImage($0, imageFormat: imageFormat)! }, + fromData: { imageSerializer.decodeImage($0, imageFormat: imageFormat)! } ) { old, new in guard let message = compare( - old, new, precision: precision, perceptualPrecision: perceptualPrecision) + old, new, precision: precision, perceptualPrecision: perceptualPrecision, imageFormat: imageFormat) else { return nil } let difference = SnapshotTesting.diff(old, new) let oldAttachment = XCTAttachment(image: old) @@ -42,7 +44,7 @@ extension Snapshotting where Value == NSImage, Format == NSImage { /// A snapshot strategy for comparing images based on pixel equality. public static var image: Snapshotting { - return .image() + return .image(imageFormat: imageFormat) } /// A snapshot strategy for comparing images based on pixel equality. @@ -53,24 +55,15 @@ /// match. 98-99% mimics /// [the precision](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e) of the /// human eye. 
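+ /// - imageFormat: The image format used to serialize the reference image on disk.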
- public static func image(precision: Float = 1, perceptualPrecision: Float = 1) -> Snapshotting { + public static func image(precision: Float = 1, perceptualPrecision: Float = 1, imageFormat: ImageSerializationFormat = imageFormat) -> Snapshotting { return .init( - pathExtension: "png", - diffing: .image(precision: precision, perceptualPrecision: perceptualPrecision) + pathExtension: imageFormat.rawValue, + diffing: .image(precision: precision, perceptualPrecision: perceptualPrecision, imageFormat: imageFormat) ) } } - private func NSImagePNGRepresentation(_ image: NSImage) -> Data? { - guard let cgImage = image.cgImage(forProposedRect: nil, context: nil, hints: nil) else { - return nil - } - let rep = NSBitmapImageRep(cgImage: cgImage) - rep.size = image.size - return rep.representation(using: .png, properties: [:]) - } - - private func compare(_ old: NSImage, _ new: NSImage, precision: Float, perceptualPrecision: Float) + private func compare(_ old: NSImage, _ new: NSImage, precision: Float, perceptualPrecision: Float, imageFormat: ImageSerializationFormat) -> String? { guard let oldCgImage = old.cgImage(forProposedRect: nil, context: nil, hints: nil) else { @@ -93,9 +86,10 @@ } let byteCount = oldContext.height * oldContext.bytesPerRow if memcmp(oldData, newData, byteCount) == 0 { return nil } + let imageSerializer = ImageSerializer() guard - let pngData = NSImagePNGRepresentation(new), - let newerCgImage = NSImage(data: pngData)?.cgImage( + let imageData = imageSerializer.encodeImage(new, imageFormat: imageFormat), + let newerCgImage = imageSerializer.decodeImage(imageData, imageFormat: imageFormat)?.cgImage( forProposedRect: nil, context: nil, hints: nil), let newerContext = context(for: newerCgImage), let newerData = newerContext.data diff --git a/Sources/SnapshotTesting/Snapshotting/NSView.swift b/Sources/SnapshotTesting/Snapshotting/NSView.swift index b2e7edfb0..b83240926 100644 --- a/Sources/SnapshotTesting/Snapshotting/NSView.swift +++ b/Sources/SnapshotTesting/Snapshotting/NSView.swift @@ -1,11 +1,12 @@ #if os(macOS) import AppKit import Cocoa + import ImageSerializationPlugin extension Snapshotting where Value == NSView, Format == NSImage { /// A snapshot strategy for comparing views based on pixel equality. public static var image: Snapshotting { - return .image() + return .image(imageFormat: imageFormat) } /// A snapshot strategy for comparing views based on pixel equality. @@ -21,10 +22,10 @@ /// human eye. /// - size: A view size override. public static func image( - precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize? = nil + precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize? 
= nil, imageFormat: ImageSerializationFormat = imageFormat ) -> Snapshotting { return SimplySnapshotting.image( - precision: precision, perceptualPrecision: perceptualPrecision + precision: precision, perceptualPrecision: perceptualPrecision, imageFormat: imageFormat ).asyncPullback { view in let initialSize = view.frame.size if let size = size { view.frame.size = size } diff --git a/Sources/SnapshotTesting/Snapshotting/NSViewController.swift b/Sources/SnapshotTesting/Snapshotting/NSViewController.swift index 69ec72dde..2d841701c 100644 --- a/Sources/SnapshotTesting/Snapshotting/NSViewController.swift +++ b/Sources/SnapshotTesting/Snapshotting/NSViewController.swift @@ -1,11 +1,12 @@ #if os(macOS) import AppKit import Cocoa + import ImageSerializationPlugin extension Snapshotting where Value == NSViewController, Format == NSImage { /// A snapshot strategy for comparing view controller views based on pixel equality. public static var image: Snapshotting { - return .image() + return .image(imageFormat: imageFormat) } /// A snapshot strategy for comparing view controller views based on pixel equality. @@ -18,10 +19,10 @@ /// human eye. /// - size: A view size override. public static func image( - precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize? = nil + precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize? = nil, imageFormat: ImageSerializationFormat ) -> Snapshotting { return Snapshotting.image( - precision: precision, perceptualPrecision: perceptualPrecision, size: size + precision: precision, perceptualPrecision: perceptualPrecision, size: size, imageFormat: imageFormat ).pullback { $0.view } } } diff --git a/Sources/SnapshotTesting/Snapshotting/SceneKit.swift b/Sources/SnapshotTesting/Snapshotting/SceneKit.swift index 94ff90459..758296e23 100644 --- a/Sources/SnapshotTesting/Snapshotting/SceneKit.swift +++ b/Sources/SnapshotTesting/Snapshotting/SceneKit.swift @@ -1,5 +1,6 @@ #if os(iOS) || os(macOS) || os(tvOS) import SceneKit + import ImageSerializationPlugin #if os(macOS) import Cocoa #elseif os(iOS) || os(tvOS) @@ -17,10 +18,10 @@ /// [the precision](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e) of the /// human eye. /// - size: The size of the scene. - public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize) + public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize, imageFormat: ImageSerializationFormat) -> Snapshotting { - return .scnScene(precision: precision, perceptualPrecision: perceptualPrecision, size: size) + return .scnScene(precision: precision, perceptualPrecision: perceptualPrecision, size: size, imageFormat: imageFormat) } } #elseif os(iOS) || os(tvOS) @@ -34,20 +35,20 @@ /// [the precision](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e) of the /// human eye. /// - size: The size of the scene. 
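+ /// - imageFormat: The image format used to serialize the reference image on disk.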
- public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize) + public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize, imageFormat: ImageSerializationFormat) -> Snapshotting { - return .scnScene(precision: precision, perceptualPrecision: perceptualPrecision, size: size) + return .scnScene(precision: precision, perceptualPrecision: perceptualPrecision, size: size, imageFormat: imageFormat) } } #endif extension Snapshotting where Value == SCNScene, Format == Image { - fileprivate static func scnScene(precision: Float, perceptualPrecision: Float, size: CGSize) + fileprivate static func scnScene(precision: Float, perceptualPrecision: Float, size: CGSize, imageFormat: ImageSerializationFormat) -> Snapshotting { return Snapshotting.image( - precision: precision, perceptualPrecision: perceptualPrecision + precision: precision, perceptualPrecision: perceptualPrecision, imageFormat: imageFormat ).pullback { scene in let view = SCNView(frame: .init(x: 0, y: 0, width: size.width, height: size.height)) view.scene = scene diff --git a/Sources/SnapshotTesting/Snapshotting/SpriteKit.swift b/Sources/SnapshotTesting/Snapshotting/SpriteKit.swift index ad515050a..a073f190b 100644 --- a/Sources/SnapshotTesting/Snapshotting/SpriteKit.swift +++ b/Sources/SnapshotTesting/Snapshotting/SpriteKit.swift @@ -1,5 +1,6 @@ #if os(iOS) || os(macOS) || os(tvOS) import SpriteKit + import ImageSerializationPlugin #if os(macOS) import Cocoa #elseif os(iOS) || os(tvOS) @@ -17,10 +18,10 @@ /// [the precision](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e) of the /// human eye. /// - size: The size of the scene. - public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize) + public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize, imageFormat: ImageSerializationFormat) -> Snapshotting { - return .skScene(precision: precision, perceptualPrecision: perceptualPrecision, size: size) + return .skScene(precision: precision, perceptualPrecision: perceptualPrecision, size: size, imageFormat: imageFormat) } } #elseif os(iOS) || os(tvOS) @@ -34,20 +35,20 @@ /// [the precision](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e) of the /// human eye. /// - size: The size of the scene. 
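+ /// - imageFormat: The image format used to serialize the reference image on disk.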
- public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize) + public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize, imageFormat: ImageSerializationFormat) -> Snapshotting { - return .skScene(precision: precision, perceptualPrecision: perceptualPrecision, size: size) + return .skScene(precision: precision, perceptualPrecision: perceptualPrecision, size: size, imageFormat: imageFormat) } } #endif extension Snapshotting where Value == SKScene, Format == Image { - fileprivate static func skScene(precision: Float, perceptualPrecision: Float, size: CGSize) + fileprivate static func skScene(precision: Float, perceptualPrecision: Float, size: CGSize, imageFormat: ImageSerializationFormat) -> Snapshotting { return Snapshotting.image( - precision: precision, perceptualPrecision: perceptualPrecision + precision: precision, perceptualPrecision: perceptualPrecision, imageFormat: imageFormat ).pullback { scene in let view = SKView(frame: .init(x: 0, y: 0, width: size.width, height: size.height)) view.presentScene(scene) diff --git a/Sources/SnapshotTesting/Snapshotting/SwiftUIView.swift b/Sources/SnapshotTesting/Snapshotting/SwiftUIView.swift index 8d85e1f0b..673ce859c 100644 --- a/Sources/SnapshotTesting/Snapshotting/SwiftUIView.swift +++ b/Sources/SnapshotTesting/Snapshotting/SwiftUIView.swift @@ -1,6 +1,7 @@ #if canImport(SwiftUI) import Foundation import SwiftUI + import ImageSerializationPlugin /// The size constraint for a snapshot (similar to `PreviewLayout`). public enum SwiftUISnapshotLayout { @@ -20,7 +21,7 @@ /// A snapshot strategy for comparing SwiftUI Views based on pixel equality. public static var image: Snapshotting { - return .image() + return .image(imageFormat: imageFormat) } /// A snapshot strategy for comparing SwiftUI Views based on pixel equality. @@ -41,7 +42,8 @@ precision: Float = 1, perceptualPrecision: Float = 1, layout: SwiftUISnapshotLayout = .sizeThatFits, - traits: UITraitCollection = .init() + traits: UITraitCollection = .init(), + imageFormat: ImageSerializationFormat = imageFormat ) -> Snapshotting { @@ -60,7 +62,7 @@ } return SimplySnapshotting.image( - precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale + precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale, imageFormat: imageFormat ).asyncPullback { view in var config = config diff --git a/Sources/SnapshotTesting/Snapshotting/UIBezierPath.swift b/Sources/SnapshotTesting/Snapshotting/UIBezierPath.swift index 6b48d622d..78b1891fd 100644 --- a/Sources/SnapshotTesting/Snapshotting/UIBezierPath.swift +++ b/Sources/SnapshotTesting/Snapshotting/UIBezierPath.swift @@ -1,10 +1,11 @@ #if os(iOS) || os(tvOS) import UIKit + import ImageSerializationPlugin extension Snapshotting where Value == UIBezierPath, Format == UIImage { /// A snapshot strategy for comparing bezier paths based on pixel equality. public static var image: Snapshotting { - return .image() + return .image(imageFormat: imageFormat) } /// A snapshot strategy for comparing bezier paths based on pixel equality. @@ -17,10 +18,10 @@ /// human eye. /// - scale: The scale to use when loading the reference image from disk. 
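+ /// - imageFormat: The image format used to serialize the reference image on disk.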
public static func image( - precision: Float = 1, perceptualPrecision: Float = 1, scale: CGFloat = 1 + precision: Float = 1, perceptualPrecision: Float = 1, scale: CGFloat = 1, imageFormat: ImageSerializationFormat ) -> Snapshotting { return SimplySnapshotting.image( - precision: precision, perceptualPrecision: perceptualPrecision, scale: scale + precision: precision, perceptualPrecision: perceptualPrecision, scale: scale, imageFormat: imageFormat ).pullback { path in let bounds = path.bounds let format: UIGraphicsImageRendererFormat diff --git a/Sources/SnapshotTesting/Snapshotting/UIImage.swift b/Sources/SnapshotTesting/Snapshotting/UIImage.swift index 3d1bb5319..8d97de37f 100644 --- a/Sources/SnapshotTesting/Snapshotting/UIImage.swift +++ b/Sources/SnapshotTesting/Snapshotting/UIImage.swift @@ -1,10 +1,11 @@ #if os(iOS) || os(tvOS) import UIKit import XCTest + import ImageSerializationPlugin extension Diffing where Value == UIImage { /// A pixel-diffing strategy for UIImage's which requires a 100% match. - public static let image = Diffing.image() + public static let image = Diffing.image(imageFormat: imageFormat) /// A pixel-diffing strategy for UIImage that allows customizing how precise the matching must be. /// @@ -18,7 +19,7 @@ /// `UITraitCollection`s default value of `0.0`, the screens scale is used. /// - Returns: A new diffing strategy. public static func image( - precision: Float = 1, perceptualPrecision: Float = 1, scale: CGFloat? = nil + precision: Float = 1, perceptualPrecision: Float = 1, scale: CGFloat? = nil, imageFormat: ImageSerializationFormat = imageFormat ) -> Diffing { let imageScale: CGFloat if let scale = scale, scale != 0.0 { @@ -26,14 +27,14 @@ } else { imageScale = UIScreen.main.scale } - + let imageSerializer = ImageSerializer() return Diffing( - toData: { $0.pngData() ?? emptyImage().pngData()! }, - fromData: { UIImage(data: $0, scale: imageScale)! } + toData: { imageSerializer.encodeImage($0, imageFormat: imageFormat) ?? emptyImage().pngData()! }, // this seems inconsistant with macOS implementation + fromData: { imageSerializer.decodeImage($0, imageFormat: imageFormat)! } // missing imageScale here ) { old, new in guard let message = compare( - old, new, precision: precision, perceptualPrecision: perceptualPrecision) + old, new, precision: precision, perceptualPrecision: perceptualPrecision, imageFormat: imageFormat) else { return nil } let difference = SnapshotTesting.diff(old, new) let oldAttachment = XCTAttachment(image: old) @@ -65,7 +66,7 @@ extension Snapshotting where Value == UIImage, Format == UIImage { /// A snapshot strategy for comparing images based on pixel equality. public static var image: Snapshotting { - return .image() + return .image(imageFormat: imageFormat) } /// A snapshot strategy for comparing images based on pixel equality. @@ -78,12 +79,12 @@ /// human eye. /// - scale: The scale of the reference image stored on disk. public static func image( - precision: Float = 1, perceptualPrecision: Float = 1, scale: CGFloat? = nil + precision: Float = 1, perceptualPrecision: Float = 1, scale: CGFloat? 
= nil, imageFormat: ImageSerializationFormat = imageFormat ) -> Snapshotting { return .init( - pathExtension: "png", + pathExtension: imageFormat.rawValue, diffing: .image( - precision: precision, perceptualPrecision: perceptualPrecision, scale: scale) + precision: precision, perceptualPrecision: perceptualPrecision, scale: scale, imageFormat: imageFormat) ) } } @@ -93,7 +94,7 @@ private let imageContextBitsPerComponent = 8 private let imageContextBytesPerPixel = 4 - private func compare(_ old: UIImage, _ new: UIImage, precision: Float, perceptualPrecision: Float) + private func compare(_ old: UIImage, _ new: UIImage, precision: Float, perceptualPrecision: Float, imageFormat: ImageSerializationFormat) -> String? { guard let oldCgImage = old.cgImage else { @@ -118,9 +119,10 @@ if memcmp(oldData, newData, byteCount) == 0 { return nil } } var newerBytes = [UInt8](repeating: 0, count: byteCount) + let imageSerializer = ImageSerializer() guard - let pngData = new.pngData(), - let newerCgImage = UIImage(data: pngData)?.cgImage, + let imageData = imageSerializer.encodeImage(new, imageFormat: imageFormat), + let newerCgImage = imageSerializer.decodeImage(imageData, imageFormat: imageFormat)?.cgImage, let newerContext = context(for: newerCgImage, data: &newerBytes), let newerData = newerContext.data else { diff --git a/Sources/SnapshotTesting/Snapshotting/UIView.swift b/Sources/SnapshotTesting/Snapshotting/UIView.swift index 7244f67d1..44885d80c 100644 --- a/Sources/SnapshotTesting/Snapshotting/UIView.swift +++ b/Sources/SnapshotTesting/Snapshotting/UIView.swift @@ -1,10 +1,11 @@ #if os(iOS) || os(tvOS) import UIKit + import ImageSerializationPlugin extension Snapshotting where Value == UIView, Format == UIImage { /// A snapshot strategy for comparing views based on pixel equality. public static var image: Snapshotting { - return .image() + return .image(imageFormat: imageFormat) } /// A snapshot strategy for comparing views based on pixel equality. @@ -25,13 +26,14 @@ precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize? = nil, - traits: UITraitCollection = .init() + traits: UITraitCollection = .init(), + imageFormat: ImageSerializationFormat = imageFormat ) -> Snapshotting { return SimplySnapshotting.image( - precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale + precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale, imageFormat: imageFormat ).asyncPullback { view in snapshotView( config: .init(safeArea: .zero, size: size ?? view.frame.size, traits: .init()), diff --git a/Sources/SnapshotTesting/Snapshotting/UIViewController.swift b/Sources/SnapshotTesting/Snapshotting/UIViewController.swift index b08b8bf59..f6562b320 100644 --- a/Sources/SnapshotTesting/Snapshotting/UIViewController.swift +++ b/Sources/SnapshotTesting/Snapshotting/UIViewController.swift @@ -1,10 +1,11 @@ #if os(iOS) || os(tvOS) import UIKit + import ImageSerializationPlugin extension Snapshotting where Value == UIViewController, Format == UIImage { /// A snapshot strategy for comparing view controller views based on pixel equality. public static var image: Snapshotting { - return .image() + return .image(imageFormat: imageFormat) } /// A snapshot strategy for comparing view controller views based on pixel equality. @@ -23,13 +24,14 @@ precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize? 
= nil, - traits: UITraitCollection = .init() + traits: UITraitCollection = .init(), + imageFormat: ImageSerializationFormat = imageFormat ) -> Snapshotting { return SimplySnapshotting.image( - precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale + precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale, imageFormat: imageFormat ).asyncPullback { viewController in snapshotView( config: size.map { .init(safeArea: config.safeArea, size: $0, traits: config.traits) } @@ -60,13 +62,14 @@ precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize? = nil, - traits: UITraitCollection = .init() + traits: UITraitCollection = .init(), + imageFormat: ImageSerializationFormat ) -> Snapshotting { return SimplySnapshotting.image( - precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale + precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale, imageFormat: imageFormat ).asyncPullback { viewController in snapshotView( config: .init(safeArea: .zero, size: size, traits: traits), diff --git a/Sources/SnapshotTestingPlugin/SnapshotTestingPlugin.swift b/Sources/SnapshotTestingPlugin/SnapshotTestingPlugin.swift new file mode 100644 index 000000000..a87ec2592 --- /dev/null +++ b/Sources/SnapshotTestingPlugin/SnapshotTestingPlugin.swift @@ -0,0 +1,46 @@ +#if canImport(Foundation) && canImport(ObjectiveC) +import Foundation + +/// A protocol that defines a plugin for snapshot testing, designed to be used in environments that support Objective-C. +/// +/// The `SnapshotTestingPlugin` protocol is intended to be adopted by classes that provide specific functionality for snapshot testing. +/// It requires each conforming class to have a unique identifier and a parameterless initializer. This protocol is designed to be used in +/// environments where both Foundation and Objective-C are available, making it compatible with Objective-C runtime features. +/// +/// Conforming classes must be marked with `@objc` to ensure compatibility with Objective-C runtime mechanisms. +@objc public protocol SnapshotTestingPlugin { + + /// A unique string identifier for the plugin. + /// + /// Each plugin must provide a static identifier that uniquely distinguishes it from other plugins. This identifier is used + /// to register and retrieve plugins within a registry, ensuring that each plugin can be easily identified and utilized. + static var identifier: String { get } + + /// Initializes a new instance of the plugin. + /// + /// This initializer is required to allow the Objective-C runtime to create instances of the plugin class when registering + /// and utilizing plugins. The initializer must not take any parameters. + init() +} +#elseif canImport(Foundation) +import Foundation + +/// A protocol that defines a plugin for snapshot testing. +/// +/// The `SnapshotTestingPlugin` protocol is intended to be adopted by classes that provide specific functionality for snapshot testing. +/// It requires each conforming class to have a unique identifier and a parameterless initializer. +public protocol SnapshotTestingPlugin: AnyObject { + + /// A unique string identifier for the plugin. + /// + /// Each plugin must provide a static identifier that uniquely distinguishes it from other plugins. This identifier is used + /// to register and retrieve plugins within a registry, ensuring that each plugin can be easily identified and utilized. 
+ static var identifier: String { get } + + /// Initializes a new instance of the plugin. + /// + /// This initializer is required so that the plugin registry can create instances of the plugin class when registering + /// and utilizing plugins. The initializer must not take any parameters. + init() +} +#endif diff --git a/Tests/SnapshotTestingTests/ImageSerializationPluginTests.swift b/Tests/SnapshotTestingTests/ImageSerializationPluginTests.swift new file mode 100644 index 000000000..f1b567b7e --- /dev/null +++ b/Tests/SnapshotTestingTests/ImageSerializationPluginTests.swift @@ -0,0 +1,120 @@ +#if canImport(SwiftUI) +import XCTest +import SnapshotTestingPlugin +@testable import SnapshotTesting +import ImageSerializationPlugin + +#if canImport(UIKit) +import UIKit +#elseif canImport(AppKit) +import AppKit +#endif + +class MockImageSerializationPlugin: ImageSerializationPlugin { + + static var imageFormat: ImageSerializationFormat = .plugins("mock") + + func encodeImage(_ image: SnapImage) -> Data? { + return "mockImageData".data(using: .utf8) + } + + func decodeImage(_ data: Data) -> SnapImage? { + let mockImage = SnapImage() + return mockImage + } + + // MARK: - SnapshotTestingPlugin + static var identifier: String = "ImageSerializationPlugin.MockImageSerializationPlugin.mock" + required init() {} +} + +class ImageSerializerTests: XCTestCase { + + var imageSerializer: ImageSerializer! + // #E48900FF + var _1pxOrangePNGImage = Data(base64Encoded: "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAAAXNSR0IArs4c6QAAAERlWElmTU0AKgAAAAgAAYdpAAQAAAABAAAAGgAAAAAAA6ABAAMAAAABAAEAAKACAAQAAAABAAAAAaADAAQAAAABAAAAAQAAAAD5Ip3+AAAADUlEQVQIHWN40snwHwAGLwJteELaggAAAABJRU5ErkJggg==")! + + override func setUp() { + super.setUp() + PluginRegistry.reset() // Reset state before each test + + // Register the mock plugin in the PluginRegistry + PluginRegistry.registerPlugin(MockImageSerializationPlugin() as SnapshotTestingPlugin) + + imageSerializer = ImageSerializer() + } + + override func tearDown() { + imageSerializer = nil + PluginRegistry.reset() // Reset state after each test + super.tearDown() + } + + func testEncodeImageUsingMockPlugin() { + let mockImage = SnapImage() + let imageData = imageSerializer.encodeImage( + mockImage, + imageFormat: MockImageSerializationPlugin.imageFormat + ) + + XCTAssertNotNil(imageData, "Image data should not be nil for mock plugin.") + XCTAssertEqual(String(data: imageData!, encoding: .utf8), "mockImageData") + } + + func testDecodeImageUsingMockPlugin() { + let mockData = "mockImageData".data(using: .utf8)! + let decodedImage = imageSerializer.decodeImage( + mockData, + imageFormat: MockImageSerializationPlugin.imageFormat + ) + + XCTAssertNotNil(decodedImage, "Image should be decoded using the mock plugin.") + } + + // TODO: 1PX png image data + func testEncodeImageAsPNG() { + let mockImage = SnapImage() + let imageData = imageSerializer.encodeImage( + mockImage, + imageFormat: .png + ) + + XCTAssertNil(imageData, "The image is empty, so the encoded data should be nil.") + } + + func testDecodeImageAsPNG() { + let decodedImage = imageSerializer.decodeImage( + _1pxOrangePNGImage, + imageFormat: .png + ) + + XCTAssertNotNil(decodedImage, "PNG image should be decoded successfully.") + XCTAssertEqual( + decodedImage?.size.width, + 1, "PNG image should be 1x1." + ) + XCTAssertEqual( + decodedImage?.size.height, + 1, "PNG image should be 1x1."
+ ) + XCTAssertEqual(getFirstPixelColorHex(from: decodedImage!), "#E48900FF") + } + + func testUnknownImageFormatFallsBackToPNG() { + let mockImage = SnapImage(data: _1pxOrangePNGImage)! + let imageData = imageSerializer.encodeImage( + mockImage, + imageFormat: .plugins("unknownFormat") + ) + + XCTAssertNotNil(imageData, "Unknown format should fall back to PNG encoding.") + } + + func testPluginRegistryShouldContainRegisteredPlugins() { + let plugins = PluginRegistry.allPlugins() as [ImageSerialization] + + XCTAssertEqual(plugins.count, 1, "There should be one registered plugin.") + XCTAssertEqual(type(of: plugins[0]).imageFormat.rawValue, "mock", "The registered plugin should support the 'mock' format.") + } +} +#endif diff --git a/Tests/SnapshotTestingTests/PluginRegistryAutomaticRegistrationTests.swift b/Tests/SnapshotTestingTests/PluginRegistryAutomaticRegistrationTests.swift new file mode 100644 index 000000000..4277eabc5 --- /dev/null +++ b/Tests/SnapshotTestingTests/PluginRegistryAutomaticRegistrationTests.swift @@ -0,0 +1,28 @@ +#if canImport(SwiftUI) && canImport(ObjectiveC) +import XCTest +import ObjectiveC +@testable import SnapshotTesting +import SnapshotTestingPlugin + +final class PluginRegistryAutomaticRegistrationTests: XCTestCase { + + override func setUp() { + super.setUp() + PluginRegistry.reset() // Reset state before each test + } + + override func tearDown() { + PluginRegistry.reset() // Reset state after each test + super.tearDown() + } + + func testAutomaticPluginRegistration() { + // Automatically register plugins using the Objective-C runtime + PluginRegistry.automaticPluginRegistration() + + // Verify that the mock plugin was automatically registered + let registeredPlugin: MockPlugin? = PluginRegistry.plugin(for: MockPlugin.identifier) + XCTAssertNotNil(registeredPlugin) + } +} +#endif diff --git a/Tests/SnapshotTestingTests/PluginRegistryTests.swift b/Tests/SnapshotTestingTests/PluginRegistryTests.swift new file mode 100644 index 000000000..06d00d76c --- /dev/null +++ b/Tests/SnapshotTestingTests/PluginRegistryTests.swift @@ -0,0 +1,63 @@ +#if canImport(SwiftUI) +import XCTest +@testable import SnapshotTesting +import SnapshotTestingPlugin + +class MockPlugin: NSObject, SnapshotTestingPlugin { + static var identifier: String = "MockPlugin" + + required override init() { + super.init() + } +} + +class AnotherMockPlugin: NSObject, SnapshotTestingPlugin { + static var identifier: String = "AnotherMockPlugin" + + required override init() { + super.init() + } +} + +final class PluginRegistryTests: XCTestCase { + + override func setUp() { + super.setUp() + PluginRegistry.reset() // Reset state before each test + } + + override func tearDown() { + PluginRegistry.reset() // Reset state after each test + super.tearDown() + } + + func testRegisterPlugin() { + // Register a mock plugin + PluginRegistry.registerPlugin(MockPlugin()) + + // Retrieve the plugin by identifier + let retrievedPlugin: MockPlugin? = PluginRegistry.plugin(for: MockPlugin.identifier) + XCTAssertNotNil(retrievedPlugin) + } + + func testRetrieveNonExistentPlugin() { + // Try to retrieve a non-existent plugin + let nonExistentPlugin: MockPlugin?
= PluginRegistry.plugin(for: "NonExistentPlugin") + XCTAssertNil(nonExistentPlugin) + } + + func testAllPlugins() { + // Register two mock plugins + PluginRegistry.registerPlugin(MockPlugin()) + PluginRegistry.registerPlugin(AnotherMockPlugin()) + + // Retrieve all plugins + let allPlugins: [SnapshotTestingPlugin] = PluginRegistry.allPlugins() + + XCTAssertEqual(allPlugins.count, 2) + XCTAssertTrue(allPlugins.contains { $0 is MockPlugin }) + XCTAssertTrue(allPlugins.contains { $0 is AnotherMockPlugin }) + } +} + +#endif diff --git a/Tests/SnapshotTestingTests/TestHelpers.swift b/Tests/SnapshotTestingTests/TestHelpers.swift index d71c855f1..da45d297c 100644 --- a/Tests/SnapshotTestingTests/TestHelpers.swift +++ b/Tests/SnapshotTestingTests/TestHelpers.swift @@ -1,110 +1,207 @@ +#if canImport(SwiftUI) && canImport(ObjectiveC) + import XCTest @testable import SnapshotTesting #if os(iOS) - let platform = "ios" +let platform = "ios" #elseif os(tvOS) - let platform = "tvos" +let platform = "tvos" #elseif os(macOS) - let platform = "macos" - extension NSTextField { - var text: String { - get { return self.stringValue } - set { self.stringValue = newValue } - } +let platform = "macos" +extension NSTextField { + var text: String { + get { return self.stringValue } + set { self.stringValue = newValue } } +} #endif #if os(macOS) || os(iOS) || os(tvOS) - extension CGPath { - /// Creates an approximation of a heart at a 45º angle with a circle above, using all available element types: - static var heart: CGPath { - let scale: CGFloat = 30.0 - let path = CGMutablePath() - - path.move(to: CGPoint(x: 0.0 * scale, y: 0.0 * scale)) - path.addLine(to: CGPoint(x: 0.0 * scale, y: 2.0 * scale)) - path.addQuadCurve( - to: CGPoint(x: 1.0 * scale, y: 3.0 * scale), - control: CGPoint(x: 0.125 * scale, y: 2.875 * scale) - ) - path.addQuadCurve( - to: CGPoint(x: 2.0 * scale, y: 2.0 * scale), - control: CGPoint(x: 1.875 * scale, y: 2.875 * scale) - ) - path.addCurve( - to: CGPoint(x: 3.0 * scale, y: 1.0 * scale), - control1: CGPoint(x: 2.5 * scale, y: 2.0 * scale), - control2: CGPoint(x: 3.0 * scale, y: 1.5 * scale) - ) - path.addCurve( - to: CGPoint(x: 2.0 * scale, y: 0.0 * scale), - control1: CGPoint(x: 3.0 * scale, y: 0.5 * scale), - control2: CGPoint(x: 2.5 * scale, y: 0.0 * scale) - ) - path.addLine(to: CGPoint(x: 0.0 * scale, y: 0.0 * scale)) - path.closeSubpath() - - path.addEllipse( - in: CGRect( - origin: CGPoint(x: 2.0 * scale, y: 2.0 * scale), - size: CGSize(width: scale, height: scale) - )) - - return path - } +extension CGPath { + /// Creates an approximation of a heart at a 45º angle with a circle above, using all available element types: + static var heart: CGPath { + let scale: CGFloat = 30.0 + let path = CGMutablePath() + + path.move(to: CGPoint(x: 0.0 * scale, y: 0.0 * scale)) + path.addLine(to: CGPoint(x: 0.0 * scale, y: 2.0 * scale)) + path.addQuadCurve( + to: CGPoint(x: 1.0 * scale, y: 3.0 * scale), + control: CGPoint(x: 0.125 * scale, y: 2.875 * scale) + ) + path.addQuadCurve( + to: CGPoint(x: 2.0 * scale, y: 2.0 * scale), + control: CGPoint(x: 1.875 * scale, y: 2.875 * scale) + ) + path.addCurve( + to: CGPoint(x: 3.0 * scale, y: 1.0 * scale), + control1: CGPoint(x: 2.5 * scale, y: 2.0 * scale), + control2: CGPoint(x: 3.0 * scale, y: 1.5 * scale) + ) + path.addCurve( + to: CGPoint(x: 2.0 * scale, y: 0.0 * scale), + control1: CGPoint(x: 3.0 * scale, y: 0.5 * scale), + control2: CGPoint(x: 2.5 * scale, y: 0.0 * scale) + ) + path.addLine(to: CGPoint(x: 0.0 * scale, y: 0.0 * scale)) + 
path.closeSubpath() + + path.addEllipse( + in: CGRect( + origin: CGPoint(x: 2.0 * scale, y: 2.0 * scale), + size: CGSize(width: scale, height: scale) + )) + + return path } +} #endif #if os(iOS) || os(tvOS) - extension UIBezierPath { - /// Creates an approximation of a heart at a 45º angle with a circle above, using all available element types: - static var heart: UIBezierPath { - UIBezierPath(cgPath: .heart) - } +extension UIBezierPath { + /// Creates an approximation of a heart at a 45º angle with a circle above, using all available element types: + static var heart: UIBezierPath { + UIBezierPath(cgPath: .heart) } +} #endif #if os(macOS) - extension NSBezierPath { - /// Creates an approximation of a heart at a 45º angle with a circle above, using all available element types: - static var heart: NSBezierPath { - let scale: CGFloat = 30.0 - let path = NSBezierPath() +extension NSBezierPath { + /// Creates an approximation of a heart at a 45º angle with a circle above, using all available element types: + static var heart: NSBezierPath { + let scale: CGFloat = 30.0 + let path = NSBezierPath() + + path.move(to: CGPoint(x: 0.0 * scale, y: 0.0 * scale)) + path.line(to: CGPoint(x: 0.0 * scale, y: 2.0 * scale)) + path.curve( + to: CGPoint(x: 1.0 * scale, y: 3.0 * scale), + controlPoint1: CGPoint(x: 0.0 * scale, y: 2.5 * scale), + controlPoint2: CGPoint(x: 0.5 * scale, y: 3.0 * scale) + ) + path.curve( + to: CGPoint(x: 2.0 * scale, y: 2.0 * scale), + controlPoint1: CGPoint(x: 1.5 * scale, y: 3.0 * scale), + controlPoint2: CGPoint(x: 2.0 * scale, y: 2.5 * scale) + ) + path.curve( + to: CGPoint(x: 3.0 * scale, y: 1.0 * scale), + controlPoint1: CGPoint(x: 2.5 * scale, y: 2.0 * scale), + controlPoint2: CGPoint(x: 3.0 * scale, y: 1.5 * scale) + ) + path.curve( + to: CGPoint(x: 2.0 * scale, y: 0.0 * scale), + controlPoint1: CGPoint(x: 3.0 * scale, y: 0.5 * scale), + controlPoint2: CGPoint(x: 2.5 * scale, y: 0.0 * scale) + ) + path.line(to: CGPoint(x: 0.0 * scale, y: 0.0 * scale)) + path.close() + + path.appendOval( + in: CGRect( + origin: CGPoint(x: 2.0 * scale, y: 2.0 * scale), + size: CGSize(width: scale, height: scale) + )) + + return path + } +} +#endif + +#if canImport(UIKit) +import UIKit - path.move(to: CGPoint(x: 0.0 * scale, y: 0.0 * scale)) - path.line(to: CGPoint(x: 0.0 * scale, y: 2.0 * scale)) - path.curve( - to: CGPoint(x: 1.0 * scale, y: 3.0 * scale), - controlPoint1: CGPoint(x: 0.0 * scale, y: 2.5 * scale), - controlPoint2: CGPoint(x: 0.5 * scale, y: 3.0 * scale) - ) - path.curve( - to: CGPoint(x: 2.0 * scale, y: 2.0 * scale), - controlPoint1: CGPoint(x: 1.5 * scale, y: 3.0 * scale), - controlPoint2: CGPoint(x: 2.0 * scale, y: 2.5 * scale) - ) - path.curve( - to: CGPoint(x: 3.0 * scale, y: 1.0 * scale), - controlPoint1: CGPoint(x: 2.5 * scale, y: 2.0 * scale), - controlPoint2: CGPoint(x: 3.0 * scale, y: 1.5 * scale) - ) - path.curve( - to: CGPoint(x: 2.0 * scale, y: 0.0 * scale), - controlPoint1: CGPoint(x: 3.0 * scale, y: 0.5 * scale), - controlPoint2: CGPoint(x: 2.5 * scale, y: 0.0 * scale) - ) - path.line(to: CGPoint(x: 0.0 * scale, y: 0.0 * scale)) - path.close() +func _getFirstPixelColorHex(from image: UIImage) -> String? 
{ + guard let cgImage = image.cgImage else { return nil } + + let pixelData = calloc(1, 4) // 4 bytes for RGBA + let colorSpace = CGColorSpaceCreateDeviceRGB() + let bitmapInfo = CGImageAlphaInfo.premultipliedLast.rawValue + + guard let context = CGContext( + data: pixelData, + width: 1, + height: 1, + bitsPerComponent: 8, + bytesPerRow: 4, + space: colorSpace, + bitmapInfo: bitmapInfo + ) else { + free(pixelData) + return nil + } + + // Draw the image in the 1x1 context to get the first pixel's color + context.draw(cgImage, in: CGRect(x: 0, y: 0, width: 1, height: 1)) + + // Get the color components + let data = pixelData!.assumingMemoryBound(to: UInt8.self) + let r = data[0] + let g = data[1] + let b = data[2] + let a = data[3] + + free(pixelData) + + // Return the hex string + return String(format: "#%02X%02X%02X%02X", r, g, b, a) +} +#endif - path.appendOval( - in: CGRect( - origin: CGPoint(x: 2.0 * scale, y: 2.0 * scale), - size: CGSize(width: scale, height: scale) - )) +#if canImport(AppKit) +import AppKit - return path - } +func _getFirstPixelColorHex(from image: NSImage) -> String? { + guard let cgImage = image.cgImage(forProposedRect: nil, context: nil, hints: nil) else { return nil } + + let pixelData = calloc(1, 4) // 4 bytes for RGBA + let colorSpace = CGColorSpaceCreateDeviceRGB() + let bitmapInfo = CGImageAlphaInfo.premultipliedLast.rawValue + + guard let context = CGContext( + data: pixelData, + width: 1, + height: 1, + bitsPerComponent: 8, + bytesPerRow: 4, + space: colorSpace, + bitmapInfo: bitmapInfo + ) else { + free(pixelData) + return nil } + + // Draw the image in the 1x1 context to get the first pixel's color + context.draw(cgImage, in: CGRect(x: 0, y: 0, width: 1, height: 1)) + + // Get the color components + let data = pixelData!.assumingMemoryBound(to: UInt8.self) + let r = data[0] + let g = data[1] + let b = data[2] + let a = data[3] + + free(pixelData) + + // Return the hex string + return String(format: "#%02X%02X%02X%02X", r, g, b, a) +} +#endif + +#if canImport(UIKit) +typealias SnapImage = UIImage +#elseif canImport(AppKit) +typealias SnapImage = NSImage +#endif + +func getFirstPixelColorHex(from image: SnapImage) -> String? { +#if canImport(UIKit) + return _getFirstPixelColorHex(from: image) +#elseif canImport(AppKit) + return _getFirstPixelColorHex(from: image) +#endif +} #endif
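Taken together, the `ImageSerialization` and `SnapshotTestingPlugin` protocols introduced in this change give third-party serializers a small conformance surface: a format key, an identifier, a parameterless initializer, and a pair of encode/decode functions. The sketch below, modeled on `MockImageSerializationPlugin` from the tests above, shows roughly what such a plugin might look like; the "heic" format key, the class name, the identifier string, and the omitted HEIC conversion are illustrative assumptions, not part of this diff.

```swift
import Foundation
import ImageSerializationPlugin
import SnapshotTestingPlugin

// Hypothetical third-party plugin: only the protocol requirements below come
// from this diff; the "heic" key and the identifier are made up for illustration.
class HEICImageSerializationPlugin: ImageSerializationPlugin {

  // The format this plugin handles; the serializer uses it to route calls.
  static var imageFormat: ImageSerializationFormat = .plugins("heic")

  // Unique identifier required by SnapshotTestingPlugin.
  static var identifier: String = "ImageSerializationPlugin.HEICImageSerializationPlugin.heic"

  // Parameterless initializer so the plugin registry can instantiate the class.
  required init() {}

  func encodeImage(_ image: SnapImage) -> Data? {
    // A real plugin would produce HEIC data here (for example via Image I/O's
    // CGImageDestination); a stub keeps this sketch platform-neutral.
    return nil
  }

  func decodeImage(_ data: Data) -> SnapImage? {
    // UIImage(data:) and NSImage(data:) both exist, so SnapImage(data:) works
    // on either platform.
    return SnapImage(data: data)
  }
}
```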
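Once a plugin like this is registered, the `imageFormat` parameter now threaded through `Snapshotting.image` lets individual assertions opt into it. The snippet below is a usage sketch under a few assumptions: an iOS test target, a `PluginRegistry` reachable from test code (the tests in this diff reach it via `@testable import SnapshotTesting`), the `assertSnapshot(of:as:)` overload, and the hypothetical `HEICImageSerializationPlugin` sketched above.

```swift
import SnapshotTesting
import UIKit
import XCTest

final class HEICSnapshotTests: XCTestCase {
  func testViewRendersWithCustomFormat() {
    // Plugins can be registered manually, as the new tests do; classes visible
    // to the Objective-C runtime can instead rely on
    // PluginRegistry.automaticPluginRegistration().
    PluginRegistry.registerPlugin(HEICImageSerializationPlugin())

    let view = UIView(frame: CGRect(x: 0, y: 0, width: 100, height: 100))
    view.backgroundColor = .orange

    // The format changes both the diffing path and the extension of the
    // recorded reference file, since pathExtension is now imageFormat.rawValue.
    assertSnapshot(of: view, as: .image(imageFormat: .plugins("heic")))
  }
}
```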
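The `ImageSerializer` type used by the comparison code and by `ImageSerializerTests` is not part of this excerpt, but the tests pin down its observable behavior: encoding and decoding are routed to the registered plugin whose `imageFormat` matches, and unknown formats fall back to PNG. The stand-in below is a simplified sketch of that contract, not the package's actual implementation.

```swift
import Foundation
import ImageSerializationPlugin
#if canImport(UIKit)
import UIKit
#elseif canImport(AppKit)
import AppKit
#endif

// Simplified stand-in mirroring the behavior exercised by ImageSerializerTests:
// route by declared format, fall back to PNG for anything else.
struct ImageSerializerSketch {
  // Plugins keyed by the raw value of the format they declare.
  private let plugins: [String: ImageSerialization]

  init(plugins: [ImageSerialization]) {
    self.plugins = Dictionary(
      plugins.map { (type(of: $0).imageFormat.rawValue, $0) },
      uniquingKeysWith: { first, _ in first }  // keep the first plugin per format
    )
  }

  func encodeImage(_ image: SnapImage, imageFormat: ImageSerializationFormat) -> Data? {
    if let plugin = plugins[imageFormat.rawValue] {
      return plugin.encodeImage(image)
    }
    return encodePNG(image)  // unknown formats fall back to PNG, as the tests expect
  }

  func decodeImage(_ data: Data, imageFormat: ImageSerializationFormat) -> SnapImage? {
    if let plugin = plugins[imageFormat.rawValue] {
      return plugin.decodeImage(data)
    }
    return SnapImage(data: data)
  }

  private func encodePNG(_ image: SnapImage) -> Data? {
    #if canImport(UIKit)
    return image.pngData()  // nil for an empty image, matching testEncodeImageAsPNG
    #else
    guard
      let tiff = image.tiffRepresentation,
      let rep = NSBitmapImageRep(data: tiff)
    else { return nil }
    return rep.representation(using: .png, properties: [:])
    #endif
  }
}
```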