
Commit 9bfa0aa

stephencelis authored and github-actions[bot] committed
Run swift-format
1 parent b30b3be commit 9bfa0aa


Sources/SnapshotTesting/Snapshotting/UIImage.swift

Lines changed: 16 additions & 10 deletions
@@ -220,7 +220,7 @@
       guard
         let thresholdOutputImage = try? deltaOutputImage.applyingThreshold(deltaThreshold),
         let averagePixel = thresholdOutputImage.applyingAreaAverage().renderSingleValue(in: context)
-        else {
+      else {
         return "Newly-taken snapshot's data could not be processed."
       }
       actualPixelPrecision = 1 - averagePixel
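The hunk above is whitespace-only, but the guard it touches carries the fast path's core idea: thresholding the per-pixel delta image and then area-averaging it reduces "count the failing pixels" to a single Core Image pass, since the mean of a 0/1 failure mask is exactly the failing-pixel fraction. A minimal sketch of that idea, assuming the stock CIColorThreshold and CIAreaAverage filters as stand-ins for the library's applyingThreshold and applyingAreaAverage helpers (which this diff does not show):

import CoreImage

// Sketch only: `deltaImage` is a per-pixel difference image and `threshold` is the
// failure cutoff; both are placeholders, not identifiers from the diff.
func failureMaskAverage(deltaImage: CIImage, threshold: Float) -> CIImage? {
  guard
    // Binarize: pixels whose delta exceeds the threshold become 1, the rest 0.
    let mask = CIFilter(
      name: "CIColorThreshold",
      parameters: [kCIInputImageKey: deltaImage, "inputThreshold": threshold]
    )?.outputImage,
    // Reduce the mask to a single pixel holding the mean of all mask values.
    let averaged = CIFilter(
      name: "CIAreaAverage",
      parameters: [kCIInputImageKey: mask, kCIInputExtentKey: CIVector(cgRect: mask.extent)]
    )?.outputImage
  else { return nil }
  // Reading this 1x1 image back (as `renderSingleValue(in:)` does later in the diff)
  // yields the failing-pixel fraction; precision is 1 minus that value.
  return averaged
}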
@@ -259,7 +259,9 @@
         }
       }
     }
-    let failingPixelPercent = Float(failingPixelCount) / Float(deltaOutputImage.extent.width * deltaOutputImage.extent.height)
+    let failingPixelPercent =
+      Float(failingPixelCount)
+      / Float(deltaOutputImage.extent.width * deltaOutputImage.extent.height)
     actualPixelPrecision = 1 - failingPixelPercent
   }
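For reference, the reflowed expression computes the same value as before: the share of pixels that failed the comparison, subtracted from 1. With illustrative numbers (not taken from the source):

// Hypothetical values: a 100x100 delta image in which 250 pixels exceeded the threshold.
let failingPixelCount = 250
let pixelCount = 100 * 100
let failingPixelPercent = Float(failingPixelCount) / Float(pixelCount)  // 0.025
let actualPixelPrecision = 1 - failingPixelPercent  // 0.975, which would fail a 0.98 pixelPrecision requirement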

@@ -268,9 +270,9 @@
   // DeltaE is in a 0-100 scale, so we need to divide by 100 to transform it to a percentage.
   let minimumPerceptualPrecision = 1 - min(maximumDeltaE / 100, 1)
   return """
-  The percentage of pixels that match \(actualPixelPrecision) is less than required \(pixelPrecision)
-  The lowest perceptual color precision \(minimumPerceptualPrecision) is less than required \(perceptualPrecision)
-  """
+    The percentage of pixels that match \(actualPixelPrecision) is less than required \(pixelPrecision)
+    The lowest perceptual color precision \(minimumPerceptualPrecision) is less than required \(perceptualPrecision)
+    """
 }

 extension CIImage {
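The re-indented failure message reports two values computed just above it; the perceptual one converts the worst observed CIE DeltaE (a 0-100 scale) into a precision. An illustrative calculation with a made-up DeltaE value:

// Hypothetical: the worst per-pixel color difference observed was a DeltaE of 4.0.
let maximumDeltaE: Float = 4.0
let minimumPerceptualPrecision = 1 - min(maximumDeltaE / 100, 1)  // 0.96
// A perceptualPrecision requirement of 0.98 would fail with this value; 0.95 would pass.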
@@ -295,15 +297,18 @@
   }

   func renderSingleValue(in context: CIContext) -> Float? {
-      guard let buffer = render(in: context) else { return nil }
-      defer { buffer.free() }
-      return buffer.data.load(fromByteOffset: 0, as: Float.self)
+    guard let buffer = render(in: context) else { return nil }
+    defer { buffer.free() }
+    return buffer.data.load(fromByteOffset: 0, as: Float.self)
   }

   func render(in context: CIContext, format: CIFormat = CIFormat.Rh) -> vImage_Buffer? {
     // Some hardware configurations (virtualized CPU renderers) do not support 32-bit float output formats,
     // so use a compatible 16-bit float format and convert the output value to 32-bit floats.
-    guard var buffer16 = try? vImage_Buffer(width: Int(extent.width), height: Int(extent.height), bitsPerPixel: 16) else { return nil }
+    guard
+      var buffer16 = try? vImage_Buffer(
+        width: Int(extent.width), height: Int(extent.height), bitsPerPixel: 16)
+    else { return nil }
     defer { buffer16.free() }
     context.render(
       self,
@@ -314,7 +319,8 @@
       colorSpace: nil
     )
     guard
-      var buffer32 = try? vImage_Buffer(width: Int(buffer16.width), height: Int(buffer16.height), bitsPerPixel: 32),
+      var buffer32 = try? vImage_Buffer(
+        width: Int(buffer16.width), height: Int(buffer16.height), bitsPerPixel: 32),
       vImageConvert_Planar16FtoPlanarF(&buffer16, &buffer32, 0) == kvImageNoError
     else { return nil }
     return buffer32
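The last two hunks only wrap long vImage_Buffer initializers, but the helper they reformat is worth seeing end to end: the image is rendered into a 16-bit half-float planar buffer, because some virtualized CPU renderers reject 32-bit float output formats, and the data is then widened to 32-bit floats with vImage. A self-contained sketch of that pattern, mirroring the diffed render(in:format:) helper but written as an independent example rather than the library's exact code:

import Accelerate
import CoreImage

// Render a single-channel CIImage into a planar half-float buffer, then convert it to
// planar 32-bit float. The caller owns the returned buffer and must call `free()` on it.
func renderAsPlanarF(_ image: CIImage, in context: CIContext) -> vImage_Buffer? {
  // 16 bits per pixel: one half-float value per pixel (CIFormat.Rh, red channel only).
  guard
    var buffer16 = try? vImage_Buffer(
      width: Int(image.extent.width), height: Int(image.extent.height), bitsPerPixel: 16)
  else { return nil }
  defer { buffer16.free() }

  // Render into the half-float buffer; renderers that cannot produce 32-bit float
  // output still accept this format.
  context.render(
    image,
    toBitmap: buffer16.data,
    rowBytes: buffer16.rowBytes,
    bounds: image.extent,
    format: .Rh,
    colorSpace: nil
  )

  // Widen every half-float value into a second, 32-bit planar buffer.
  guard
    var buffer32 = try? vImage_Buffer(
      width: Int(buffer16.width), height: Int(buffer16.height), bitsPerPixel: 32),
    vImageConvert_Planar16FtoPlanarF(&buffer16, &buffer32, 0) == kvImageNoError
  else { return nil }
  return buffer32
}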
