[camera_avfoundation] Implementation swift migration - part 9 #9645

Merged
4 changes: 4 additions & 0 deletions packages/camera/camera_avfoundation/CHANGELOG.md
@@ -1,3 +1,7 @@
## 0.9.20+5

* Migrates `startVideoRecording`, `setUpVideoRecording`, and `setupWriter` methods to Swift.

## 0.9.20+4

* Migrates `setVideoFormat`, `stopVideoRecording`, and `stopImageStream` methods to Swift.
@@ -122,6 +122,153 @@ final class DefaultCamera: FLTCam, Camera {
audioCaptureSession.stopRunning()
}

func startVideoRecording(
completion: @escaping (FlutterError?) -> Void,
messengerForStreaming messenger: FlutterBinaryMessenger?
) {
guard !isRecording else {
completion(
FlutterError(
code: "Error",
message: "Video is already recording",
details: nil))
return
}

if let messenger = messenger {
startImageStream(with: messenger) { [weak self] error in
self?.setUpVideoRecording(completion: completion)
}
return
}

setUpVideoRecording(completion: completion)
}

/// Main logic to set up the video recording.
private func setUpVideoRecording(completion: @escaping (FlutterError?) -> Void) {
let videoRecordingPath: String
do {
videoRecordingPath = try getTemporaryFilePath(
withExtension: "mp4",
subfolder: "videos",
prefix: "REC_")
self.videoRecordingPath = videoRecordingPath
} catch let error as NSError {
completion(DefaultCamera.flutterErrorFromNSError(error))
return
}

guard setupWriter(forPath: videoRecordingPath) else {
completion(
FlutterError(
code: "IOError",
message: "Setup Writer Failed",
details: nil))
return
}

// startWriting should not be called in didOutputSampleBuffer, where it can cause a state
// in which isRecording is true but videoWriter.status is still .unknown when
// stopVideoRecording is called after startVideoRecording but before didOutputSampleBuffer
// has had a chance to call startWriting, as well as lag at the start of the video.
// https://github.com/flutter/flutter/issues/132016
// https://github.com/flutter/flutter/issues/151319
videoWriter?.startWriting()
isFirstVideoSample = true
isRecording = true
isRecordingPaused = false
videoTimeOffset = CMTime.zero
audioTimeOffset = CMTime.zero
videoIsDisconnected = false
audioIsDisconnected = false
completion(nil)
}

private func setupWriter(forPath path: String) -> Bool {
setUpCaptureSessionForAudioIfNeeded()

var error: NSError?
videoWriter = assetWriterFactory(URL(fileURLWithPath: path), AVFileType.mp4, &error)

guard let videoWriter = videoWriter else {
if let error = error {
reportErrorMessage(error.description)
}
return false
}

var videoSettings = mediaSettingsAVWrapper.recommendedVideoSettingsForAssetWriter(
withFileType:
AVFileType.mp4,
for: captureVideoOutput
)

if mediaSettings.videoBitrate != nil || mediaSettings.framesPerSecond != nil {
var compressionProperties: [String: Any] = [:]

if let videoBitrate = mediaSettings.videoBitrate {
compressionProperties[AVVideoAverageBitRateKey] = videoBitrate
}

if let framesPerSecond = mediaSettings.framesPerSecond {
compressionProperties[AVVideoExpectedSourceFrameRateKey] = framesPerSecond
}

videoSettings?[AVVideoCompressionPropertiesKey] = compressionProperties
}

let videoWriterInput = mediaSettingsAVWrapper.assetWriterVideoInput(
withOutputSettings: videoSettings)
self.videoWriterInput = videoWriterInput

let sourcePixelBufferAttributes: [String: Any] = [
kCVPixelBufferPixelFormatTypeKey as String: videoFormat
]

videoAdaptor = inputPixelBufferAdaptorFactory(videoWriterInput, sourcePixelBufferAttributes)

videoWriterInput.expectsMediaDataInRealTime = true

// Add the audio input
if mediaSettings.enableAudio {
var audioChannelLayout = AudioChannelLayout()
audioChannelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Mono

let audioChannelLayoutData = withUnsafeBytes(of: &audioChannelLayout) { Data($0) }

var audioSettings: [String: Any] = [
AVFormatIDKey: kAudioFormatMPEG4AAC,
AVSampleRateKey: 44100.0,
AVNumberOfChannelsKey: 1,
AVChannelLayoutKey: audioChannelLayoutData,
]

if let audioBitrate = mediaSettings.audioBitrate {
audioSettings[AVEncoderBitRateKey] = audioBitrate
}

let newAudioWriterInput = mediaSettingsAVWrapper.assetWriterAudioInput(
withOutputSettings: audioSettings)
newAudioWriterInput.expectsMediaDataInRealTime = true
mediaSettingsAVWrapper.addInput(newAudioWriterInput, to: videoWriter)
self.audioWriterInput = newAudioWriterInput
audioOutput.setSampleBufferDelegate(self, queue: captureSessionQueue)
}

if flashMode == .torch {
try? captureDevice.lockForConfiguration()
captureDevice.torchMode = .on
captureDevice.unlockForConfiguration()
Contributor

Curious: if the lock fails, will unlock crash?

Contributor Author

Unlocking without an existing lock doesn't crash (I've tested it without calling lock first, and I assume a failed lock would behave the same), but setting torchMode without acquiring a lock does. Neither locking nor unlocking twice crashes either. I won't address this during the migration, but I will open an issue for it.
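
For reference, a minimal sketch of the defensive pattern a follow-up could adopt (not part of this PR; it reuses the PR's existing captureDevice, flashMode, and reportErrorMessage members and assumes the throwing lockForConfiguration() projection): configure torchMode only while the configuration lock is actually held, so a failed lock never leads to an unlocked torchMode write or an unpaired unlockForConfiguration().

if flashMode == .torch {
  do {
    // Configure the torch only while holding the configuration lock;
    // writing torchMode without the lock is the call that crashes.
    try captureDevice.lockForConfiguration()
    captureDevice.torchMode = .on
    captureDevice.unlockForConfiguration()
  } catch {
    // Surface the failure instead of silently skipping torch setup.
    reportErrorMessage("Failed to lock capture device for torch configuration: \(error)")
  }
}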

}

mediaSettingsAVWrapper.addInput(videoWriterInput, to: videoWriter)

captureVideoOutput.setSampleBufferDelegate(self, queue: captureSessionQueue)

return true
}

func pauseVideoRecording() {
isRecordingPaused = true
videoIsDisconnected = true
@@ -29,14 +29,11 @@ @interface FLTCam () <AVCaptureVideoDataOutputSampleBufferDelegate,
AVCaptureAudioDataOutputSampleBufferDelegate>

@property(readonly, nonatomic) int64_t textureId;
@property(readonly, nonatomic) FCPPlatformMediaSettings *mediaSettings;
@property(readonly, nonatomic) FLTCamMediaSettingsAVWrapper *mediaSettingsAVWrapper;

@property(readonly, nonatomic) CGSize captureSize;
@property(strong, nonatomic)
NSObject<FLTAssetWriterInputPixelBufferAdaptor> *assetWriterPixelBufferAdaptor;
@property(strong, nonatomic) AVCaptureVideoDataOutput *videoOutput;
@property(strong, nonatomic) AVCaptureAudioDataOutput *audioOutput;
@property(assign, nonatomic) BOOL isAudioSetup;

/// The queue on which captured photos (not videos) are written to disk.
@@ -47,8 +44,6 @@ @interface FLTCam () <AVCaptureVideoDataOutputSampleBufferDelegate,
@property(nonatomic, copy) VideoDimensionsForFormat videoDimensionsForFormat;
/// A wrapper for AVCaptureDevice creation to allow for dependency injection in tests.
@property(nonatomic, copy) AudioCaptureDeviceFactory audioCaptureDeviceFactory;
@property(nonatomic, copy) AssetWriterFactory assetWriterFactory;
@property(nonatomic, copy) InputPixelBufferAdaptorFactory inputPixelBufferAdaptorFactory;
/// Reports the given error message to the Dart side of the plugin.
///
/// Can be called from any thread.
@@ -412,57 +407,6 @@ - (BOOL)setCaptureSessionPreset:(FCPPlatformResolutionPreset)resolutionPreset
return bestFormat;
}

/// Main logic to setup the video recording.
- (void)setUpVideoRecordingWithCompletion:(void (^)(FlutterError *_Nullable))completion {
NSError *error;
_videoRecordingPath = [self getTemporaryFilePathWithExtension:@"mp4"
subfolder:@"videos"
prefix:@"REC_"
error:&error];
if (error) {
completion(FlutterErrorFromNSError(error));
return;
}
if (![self setupWriterForPath:_videoRecordingPath]) {
completion([FlutterError errorWithCode:@"IOError" message:@"Setup Writer Failed" details:nil]);
return;
}
// startWriting should not be called in didOutputSampleBuffer where it can cause state
// in which _isRecording is YES but _videoWriter.status is AVAssetWriterStatusUnknown
// in stopVideoRecording if it is called after startVideoRecording but before
// didOutputSampleBuffer had chance to call startWriting and lag at start of video
// https://github.com/flutter/flutter/issues/132016
// https://github.com/flutter/flutter/issues/151319
[_videoWriter startWriting];
_isFirstVideoSample = YES;
_isRecording = YES;
_isRecordingPaused = NO;
_videoTimeOffset = CMTimeMake(0, 1);
_audioTimeOffset = CMTimeMake(0, 1);
_videoIsDisconnected = NO;
_audioIsDisconnected = NO;
completion(nil);
}

- (void)startVideoRecordingWithCompletion:(void (^)(FlutterError *_Nullable))completion
messengerForStreaming:(nullable NSObject<FlutterBinaryMessenger> *)messenger {
if (!_isRecording) {
if (messenger != nil) {
[self startImageStreamWithMessenger:messenger
completion:^(FlutterError *_Nullable error) {
[self setUpVideoRecordingWithCompletion:completion];
}];
return;
}

[self setUpVideoRecordingWithCompletion:completion];
} else {
completion([FlutterError errorWithCode:@"Error"
message:@"Video is already recording"
details:nil]);
}
}

- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
completion:(void (^)(FlutterError *))completion {
[self startImageStreamWithMessenger:messenger
@@ -510,91 +454,6 @@ - (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messen
}
}

- (BOOL)setupWriterForPath:(NSString *)path {
NSError *error = nil;
NSURL *outputURL;
if (path != nil) {
outputURL = [NSURL fileURLWithPath:path];
} else {
return NO;
}

[self setUpCaptureSessionForAudioIfNeeded];

_videoWriter = _assetWriterFactory(outputURL, AVFileTypeMPEG4, &error);

NSParameterAssert(_videoWriter);
if (error) {
[self reportErrorMessage:error.description];
return NO;
}

NSMutableDictionary<NSString *, id> *videoSettings = [[_mediaSettingsAVWrapper
recommendedVideoSettingsForAssetWriterWithFileType:AVFileTypeMPEG4
forOutput:_captureVideoOutput] mutableCopy];

if (_mediaSettings.videoBitrate || _mediaSettings.framesPerSecond) {
NSMutableDictionary *compressionProperties = [[NSMutableDictionary alloc] init];

if (_mediaSettings.videoBitrate) {
compressionProperties[AVVideoAverageBitRateKey] = _mediaSettings.videoBitrate;
}

if (_mediaSettings.framesPerSecond) {
compressionProperties[AVVideoExpectedSourceFrameRateKey] = _mediaSettings.framesPerSecond;
}

videoSettings[AVVideoCompressionPropertiesKey] = compressionProperties;
}

_videoWriterInput =
[_mediaSettingsAVWrapper assetWriterVideoInputWithOutputSettings:videoSettings];

_videoAdaptor = _inputPixelBufferAdaptorFactory(
_videoWriterInput, @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(_videoFormat)});

NSParameterAssert(_videoWriterInput);

_videoWriterInput.expectsMediaDataInRealTime = YES;

// Add the audio input
if (_mediaSettings.enableAudio) {
AudioChannelLayout acl;
bzero(&acl, sizeof(acl));
acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
NSMutableDictionary *audioOutputSettings = [@{
AVFormatIDKey : [NSNumber numberWithInt:kAudioFormatMPEG4AAC],
AVSampleRateKey : [NSNumber numberWithFloat:44100.0],
AVNumberOfChannelsKey : [NSNumber numberWithInt:1],
AVChannelLayoutKey : [NSData dataWithBytes:&acl length:sizeof(acl)],
} mutableCopy];

if (_mediaSettings.audioBitrate) {
audioOutputSettings[AVEncoderBitRateKey] = _mediaSettings.audioBitrate;
}

_audioWriterInput =
[_mediaSettingsAVWrapper assetWriterAudioInputWithOutputSettings:audioOutputSettings];

_audioWriterInput.expectsMediaDataInRealTime = YES;

[_mediaSettingsAVWrapper addInput:_audioWriterInput toAssetWriter:_videoWriter];
[_audioOutput setSampleBufferDelegate:self queue:_captureSessionQueue];
}

if (self.flashMode == FCPPlatformFlashModeTorch) {
[self.captureDevice lockForConfiguration:nil];
[self.captureDevice setTorchMode:AVCaptureTorchModeOn];
[self.captureDevice unlockForConfiguration];
}

[_mediaSettingsAVWrapper addInput:_videoWriterInput toAssetWriter:_videoWriter];

[_captureVideoOutput setSampleBufferDelegate:self queue:_captureSessionQueue];

return YES;
}

// This function, although slightly modified, is also in video_player_avfoundation.
// Both need to do the same thing and run on the same thread (for example main thread).
// Configure application wide audio session manually to prevent overwriting flag
@@ -60,27 +60,29 @@ NS_ASSUME_NONNULL_BEGIN
@property(readonly, nonatomic) NSObject<FLTCaptureDeviceInputFactory> *captureDeviceInputFactory;
/// All FLTCam's state access and capture session related operations should be run on this queue.
@property(strong, nonatomic) dispatch_queue_t captureSessionQueue;
@property(nonatomic, copy) AssetWriterFactory assetWriterFactory;
@property(readonly, nonatomic) FLTCamMediaSettingsAVWrapper *mediaSettingsAVWrapper;
@property(readonly, nonatomic) FCPPlatformMediaSettings *mediaSettings;
@property(nonatomic, copy) InputPixelBufferAdaptorFactory inputPixelBufferAdaptorFactory;
@property(strong, nonatomic) AVCaptureAudioDataOutput *audioOutput;

/// Initializes an `FLTCam` instance with the given configuration.
/// @param error report to the caller if any error happened creating the camera.
- (instancetype)initWithConfiguration:(FLTCamConfiguration *)configuration error:(NSError **)error;

- (void)captureToFileWithCompletion:(void (^)(NSString *_Nullable,
FlutterError *_Nullable))completion;
/// Starts recording a video with an optional streaming messenger.
/// If the messenger is non-nil then it will be called for each
/// captured frame, allowing streaming concurrently with recording.
///
/// @param messenger Nullable messenger for capturing each frame.
- (void)startVideoRecordingWithCompletion:(void (^)(FlutterError *_Nullable))completion
messengerForStreaming:(nullable NSObject<FlutterBinaryMessenger> *)messenger;

- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
completion:(nonnull void (^)(FlutterError *_Nullable))completion;
- (void)setUpCaptureSessionForAudioIfNeeded;

// Methods exposed for the Swift DefaultCamera subclass
- (void)updateOrientation;
- (nullable NSString *)getTemporaryFilePathWithExtension:(NSString *)extension
subfolder:(NSString *)subfolder
prefix:(NSString *)prefix
error:(NSError **)error;

@end

2 changes: 1 addition & 1 deletion packages/camera/camera_avfoundation/pubspec.yaml
@@ -2,7 +2,7 @@ name: camera_avfoundation
description: iOS implementation of the camera plugin.
repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_avfoundation
issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
version: 0.9.20+4
version: 0.9.20+5

environment:
sdk: ^3.6.0