Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ final class CameraSessionPresetsTests: XCTestCase {
description: "Expected lockForConfiguration called")

let videoSessionMock = MockCaptureSession()
videoSessionMock.canSetSessionPresetStub = { _ in true }
videoSessionMock.setSessionPresetStub = { preset in
if preset == expectedPreset {
presetExpectation.fulfill()
Expand All @@ -29,15 +30,18 @@ final class CameraSessionPresetsTests: XCTestCase {
let captureFormatMock = MockCaptureDeviceFormat()
let captureDeviceMock = MockCaptureDevice()
captureDeviceMock.flutterFormats = [captureFormatMock]
captureDeviceMock.flutterActiveFormat = captureFormatMock
var currentFormat: CaptureDeviceFormat = captureFormatMock
captureDeviceMock.activeFormatStub = {
return currentFormat
}
captureDeviceMock.lockForConfigurationStub = {
lockForConfigurationExpectation.fulfill()
}

let configuration = CameraTestUtils.createTestCameraConfiguration()
configuration.videoCaptureDeviceFactory = { _ in captureDeviceMock }
configuration.videoDimensionsConverter = { format in
return CMVideoDimensions(width: 1, height: 1)
configuration.videoDimensionsConverter = { _ in
return CMVideoDimensions(width: 4, height: 3)
}
configuration.videoCaptureSession = videoSessionMock
configuration.mediaSettings = CameraTestUtils.createDefaultMediaSettings(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -301,4 +301,60 @@ final class CameraSettingsTests: XCTestCase {
"Audio session should receive AVCaptureAudioDataOutput when enableAudio is true"
)
}

func testResolutionPresetWithMax_mustIgnoreLossyFormatsAndSquares() {
  // The session mock accepts any preset, so this test exercises only the
  // device-format selection logic.
  let sessionMock = MockCaptureSession()
  sessionMock.canSetSessionPresetStub = { _ in true }

  // Three candidate formats: a lossy Bayer-packed format ('btp2'), a square
  // format, and a conventional 4K format. The camera is expected to skip the
  // first two and settle on 4K.
  // NOTE(review): under a strict 4:3 aspect filter neither 4224x3024 (~1.397)
  // nor 3840x2160 (16:9) is exactly 4:3 — confirm the selection path is
  // actually exercised rather than falling through untouched.
  let bayerPackedFormat = MockCaptureDeviceFormat(
    codecType: 1_651_798_066,  // decimal FourCharCode for 'btp2'
    width: 4224,
    height: 3024
  )
  let squareFormat = MockCaptureDeviceFormat(
    codecType: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
    width: 4032,
    height: 4032
  )
  let fourKFormat = MockCaptureDeviceFormat(
    codecType: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
    width: 3840,
    height: 2160
  )

  let deviceMock = MockCaptureDevice()
  deviceMock.flutterFormats = [bayerPackedFormat, squareFormat, fourKFormat]

  // Track the active format through the stubbed getter/setter pair.
  var activeFormat: CaptureDeviceFormat = fourKFormat
  deviceMock.activeFormatStub = { activeFormat }
  deviceMock.setActiveFormatStub = { activeFormat = $0 }

  let configuration = CameraTestUtils.createTestCameraConfiguration()
  configuration.videoCaptureDeviceFactory = { _ in deviceMock }
  configuration.videoCaptureSession = sessionMock
  configuration.videoDimensionsConverter = {
    CMVideoFormatDescriptionGetDimensions($0.formatDescription)
  }
  configuration.mediaSettings = CameraTestUtils.createDefaultMediaSettings(
    resolutionPreset: PlatformResolutionPreset.max
  )

  _ = CameraTestUtils.createTestCamera(configuration)

  // NOTE(review): this reads `flutterActiveFormat` while the active format is
  // stubbed via `activeFormatStub`/`setActiveFormatStub` above — verify the
  // mock keeps the two in sync, otherwise this assertion checks a stale value.
  let chosenFormat = deviceMock.flutterActiveFormat
  let chosenDimensions = CMVideoFormatDescriptionGetDimensions(chosenFormat.formatDescription)

  XCTAssertEqual(
    chosenDimensions.width,
    3840,
    "Camera should have ignored the lossy and square formats, safely falling back to 4K."
  )
  XCTAssertEqual(
    chosenDimensions.height,
    2160,
    "Camera should have ignored the lossy and square formats, safely falling back to 4K."
  )
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -37,4 +37,12 @@ final class MockCaptureDeviceFormat: NSObject, CaptureDeviceFormat {
allocator: kCFAllocatorDefault, codecType: kCVPixelFormatType_32BGRA, width: 1920,
height: 1080, extensions: nil, formatDescriptionOut: &_formatDescription)
}

/// Creates a mock format backed by a real `CMVideoFormatDescription` with the
/// given media subtype and pixel dimensions.
///
/// - Parameters:
///   - codecType: FourCC media subtype for the format description.
///   - width: Frame width in pixels.
///   - height: Frame height in pixels.
init(codecType: OSType, width: Int32, height: Int32) {
  super.init()

  // Fail fast if CoreMedia rejects the parameters; silently ignoring the
  // status would leave `_formatDescription` invalid and surface later as a
  // confusing crash elsewhere in the test.
  let status = CMVideoFormatDescriptionCreate(
    allocator: kCFAllocatorDefault, codecType: codecType, width: width,
    height: height, extensions: nil, formatDescriptionOut: &_formatDescription)
  precondition(status == noErr, "CMVideoFormatDescriptionCreate failed with status \(status)")
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ class MockCaptureVideoDataOutput: NSObject, CaptureVideoDataOutput {
var avOutput = AVCaptureVideoDataOutput()
var alwaysDiscardsLateVideoFrames = false
var videoSettings: [String: Any]! = [:]
var availableVideoPixelFormatTypes: [FourCharCode] = []

var connectionWithMediaTypeStub: ((AVMediaType) -> CaptureConnection?)?

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import UIKit
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I needed to add this import to compile.


@testable import camera_avfoundation

// Import Objective-C part of the implementation when SwiftPM is used.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,9 @@ protocol CaptureVideoDataOutput: CaptureOutput {
/// Corresponds to the `alwaysDiscardsLateVideoFrames` property of `AVCaptureVideoDataOutput`
var alwaysDiscardsLateVideoFrames: Bool { get set }

/// Corresponds to the `availableVideoPixelFormatTypes` property of `AVCaptureVideoDataOutput`
var availableVideoPixelFormatTypes: [FourCharCode] { get }

/// Corresponds to the `videoSettings` property of `AVCaptureVideoDataOutput`
var videoSettings: [String: Any]! { get set }

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -340,7 +340,7 @@ final class DefaultCamera: NSObject, Camera {
audioCaptureSession.sessionPreset = videoCaptureSession.sessionPreset
}

/// Finds the highest available resolution in terms of pixel count for the given device.
/// Finds the highest available 4:3 resolution in terms of pixel count for the given device.
/// Preferred are formats with the same subtype as current activeFormat.
private func highestResolutionFormat(forCaptureDevice captureDevice: CaptureDevice)
-> CaptureDeviceFormat?
Expand All @@ -351,12 +351,31 @@ final class DefaultCamera: NSObject, Camera {
var maxPixelCount: UInt = 0
var isBestSubTypePreferred = false

let unsupportedSubTypes: [FourCharCode] = [
1_651_798_066 // Decimal for FourCC 'btp2', i.e. kCVPixelFormatType_96VersatileBayerPacked12
]

for format in captureDevice.flutterFormats {
let subType = CMFormatDescriptionGetMediaSubType(format.formatDescription)

// Skip formats that will crash the Flutter Engine
if unsupportedSubTypes.contains(subType) {
continue
}

let resolution = videoDimensionsConverter(format)
let height = UInt(resolution.height)
let width = UInt(resolution.width)
let ratio =
Double(max(resolution.width, resolution.height))
/ Double(min(resolution.width, resolution.height))
let is4x3 = abs(ratio - 4.0 / 3.0) < 0.05

if !is4x3 {
continue
}

let pixelCount = height * width
let subType = CMFormatDescriptionGetMediaSubType(format.formatDescription)
let isSubTypePreferred = subType == preferredSubType

if pixelCount > maxPixelCount
Expand All @@ -367,7 +386,6 @@ final class DefaultCamera: NSObject, Camera {
isBestSubTypePreferred = isSubTypePreferred
}
}

return bestFormat
}

Expand Down