diff --git a/package/ios/Core/CameraSession+Configuration.swift b/package/ios/Core/CameraSession+Configuration.swift
index ecd2a94daf..0664160aa0 100644
--- a/package/ios/Core/CameraSession+Configuration.swift
+++ b/package/ios/Core/CameraSession+Configuration.swift
@@ -176,6 +176,29 @@ extension CameraSession {
 
   // pragma MARK: Format
 
+  // Bayer/ProRes RAW formats that cannot be encoded with standard codecs.
+  // These formats require ProRes RAW and SMPTE RDD18 metadata.
+  private static let bayerFormats: Set<OSType> = [
+    0x6274_7032, // btp2 - Bayer To ProRes 2
+    0x6274_7033, // btp3 - Bayer To ProRes 3
+    0x6270_3136, // bp16 - 16-bit Bayer
+    0x6270_3234, // bp24 - 24-bit Bayer
+    0x6270_3332, // bp32 - 32-bit Bayer
+  ]
+
+  // Standard 8-bit formats (most compatible for HEVC/H.264 recording)
+  private static let standard8BitFormats: Set<OSType> = [
+    kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, // 420v
+    kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, // 420f
+    kCVPixelFormatType_32BGRA, // BGRA
+  ]
+
+  // Standard 10-bit formats (require HEVC Main10 profile)
+  private static let standard10BitFormats: Set<OSType> = [
+    kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange, // x420
+    kCVPixelFormatType_420YpCbCr10BiPlanarFullRange, // xf20
+  ]
+
   /**
    Configures the active format (`format`)
    */
@@ -193,16 +216,102 @@ extension CameraSession {
       return
     }
 
-    // Find matching format (JS Dictionary -> strongly typed Swift class)
-    let format = device.formats.first { targetFormat.isEqualTo(format: $0) }
-    guard let format else {
+    // Find all matching formats (JS Dictionary -> strongly typed Swift class)
+    let matchingFormats = device.formats.filter { targetFormat.isEqualTo(format: $0) }
+    guard !matchingFormats.isEmpty else {
+      throw CameraError.format(.invalidFormat)
+    }
+
+    // Helper to get pixel format type from format description
+    func getPixelFormatType(_ fmt: AVCaptureDevice.Format) -> OSType {
+      return CMFormatDescriptionGetMediaSubType(fmt.formatDescription)
+    }
+
+    // Filter out Bayer-only formats (like iPhone 16/17's special formats)
+    // These formats cannot be used for standard video recording
+    let recordableFormats = matchingFormats.filter { fmt in
+      let pixelFormat = getPixelFormatType(fmt)
+      let isBayer = CameraSession.bayerFormats.contains(pixelFormat)
+      if isBayer {
+        VisionLogger.log(level: .warning,
+                         message: "Filtering out Bayer format: \(pixelFormat.fourCCString) " +
+                           "(\(fmt.videoDimensions.width)x\(fmt.videoDimensions.height))")
+      }
+      return !isBayer
+    }
+
+    VisionLogger.log(level: .info,
+                     message: "Found \(matchingFormats.count) matching formats, " +
+                       "\(recordableFormats.count) are recordable (non-Bayer)")
+
+    // Prefer 8-bit formats first (most compatible), then 10-bit, then others.
+    let format: AVCaptureDevice.Format
+    if let format8Bit = recordableFormats.first(where: {
+      CameraSession.standard8BitFormats.contains(getPixelFormatType($0))
+    }) {
+      format = format8Bit
+      VisionLogger.log(level: .info,
+                       message: "Selected 8-bit format: \(getPixelFormatType(format).fourCCString) " +
+                         "(\(format.videoDimensions.width)x\(format.videoDimensions.height))")
+    } else if let format10Bit = recordableFormats.first(where: {
+      CameraSession.standard10BitFormats.contains(getPixelFormatType($0))
+    }) {
+      format = format10Bit
+      VisionLogger.log(level: .info,
+                       message: "Selected 10-bit format: \(getPixelFormatType(format).fourCCString) " +
+                         "(\(format.videoDimensions.width)x\(format.videoDimensions.height))")
+    } else if !recordableFormats.isEmpty {
+      // No standard format found among recordable formats, use first recordable
+      format = recordableFormats[0]
+      VisionLogger.log(level: .warning,
+                       message: "No standard pixel format found. Using first recordable: " +
+                         "\(getPixelFormatType(format).fourCCString)")
+    } else if !matchingFormats.isEmpty {
+      // FALLBACK: All matching formats are Bayer - find alternative at similar resolution
+      VisionLogger.log(level: .error,
+                       message: "All \(matchingFormats.count) matching formats are Bayer! " +
+                         "Looking for alternative resolution...")
+
+      let targetWidth = matchingFormats[0].videoDimensions.width
+      let targetHeight = matchingFormats[0].videoDimensions.height
+
+      let alternativeFormats = device.formats.filter { fmt in
+        let pixelFormat = getPixelFormatType(fmt)
+        return !CameraSession.bayerFormats.contains(pixelFormat) &&
+          (CameraSession.standard8BitFormats.contains(pixelFormat) ||
+            CameraSession.standard10BitFormats.contains(pixelFormat))
+      }.sorted { a, b in
+        // Sort by how close they are to the target resolution
+        let aArea = Int(a.videoDimensions.width) * Int(a.videoDimensions.height)
+        let bArea = Int(b.videoDimensions.width) * Int(b.videoDimensions.height)
+        let targetArea = Int(targetWidth) * Int(targetHeight)
+        return abs(aArea - targetArea) < abs(bArea - targetArea)
+      }
+
+      if let alternative = alternativeFormats.first {
+        format = alternative
+        VisionLogger.log(level: .warning,
+                         message: "Using alternative format: \(getPixelFormatType(format).fourCCString) " +
+                           "(\(format.videoDimensions.width)x\(format.videoDimensions.height)) " +
+                           "instead of Bayer \(targetWidth)x\(targetHeight)")
+      } else {
+        // Last resort: use the Bayer format and hope for the best
+        format = matchingFormats[0]
+        VisionLogger.log(level: .error,
+                         message: "No alternative found! Using Bayer format: " +
+                           "\(getPixelFormatType(format).fourCCString) - recording will likely fail")
+      }
+    } else {
       throw CameraError.format(.invalidFormat)
     }
 
     // Set new device Format
     device.activeFormat = format
 
-    VisionLogger.log(level: .info, message: "Successfully configured Format!")
+    VisionLogger.log(level: .info,
+                     message: "Successfully configured Format (mediaSubType: " +
+                       "\(getPixelFormatType(format).fourCCString), " +
+                       "dimensions: \(format.videoDimensions.width)x\(format.videoDimensions.height))")
   }
 
   func configureVideoOutputFormat(configuration: CameraConfiguration) {
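Note on the hard-coded constants above: each value is simply a four-character code packed into a UInt32 (four ASCII bytes, big-endian). A standalone sanity check, not part of the patch, that the hex literals match their FourCC comments:

import CoreVideo

// Packs a 4-character ASCII code into its UInt32 FourCC value.
func fourCC(_ code: String) -> UInt32 {
  precondition(code.utf8.count == 4, "FourCC must be exactly 4 ASCII bytes")
  return code.utf8.reduce(0) { ($0 << 8) | UInt32($1) }
}

assert(fourCC("btp2") == 0x6274_7032) // Bayer/ProRes RAW format from the list above
assert(fourCC("420v") == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) // 0x34323076
assert(fourCC("x420") == kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange) // 0x78343230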
diff --git a/package/ios/Core/CameraSession+Video.swift b/package/ios/Core/CameraSession+Video.swift
index 77839f6332..24c7dd8077 100644
--- a/package/ios/Core/CameraSession+Video.swift
+++ b/package/ios/Core/CameraSession+Video.swift
@@ -12,6 +12,15 @@ import UIKit
 
 private let INSUFFICIENT_STORAGE_ERROR_CODE = -11807
 
+// Bayer/ProRes RAW formats that cannot be encoded directly
+private let kBayerFormats: Set<OSType> = [
+  0x6274_7032, // btp2 - Bayer To ProRes 2
+  0x6274_7033, // btp3 - Bayer To ProRes 3
+  0x6270_3136, // bp16 - 16-bit Bayer
+  0x6270_3234, // bp24 - 24-bit Bayer
+  0x6270_3332, // bp32 - 32-bit Bayer
+]
+
 extension CameraSession {
   /**
    Starts a video + audio recording with a custom Asset Writer.
@@ -39,51 +48,49 @@ extension CameraSession {
       // Callback for when the recording ends
       let onFinish = { (recordingSession: RecordingSession, status: AVAssetWriter.Status, error: Error?) in
-        defer {
-          // Disable Audio Session again
-          if enableAudio {
-            CameraQueues.audioQueue.async {
-              self.deactivateAudioSession()
+        CameraQueues.cameraQueue.async {
+          defer {
+            if enableAudio {
+              CameraQueues.audioQueue.async {
+                self.deactivateAudioSession()
+              }
             }
           }
-        }
-        self.recordingSession = nil
-        self.recordingSizeTimer?.cancel()
-        self.recordingSizeTimer = nil
-
-        if self.didCancelRecording {
-          VisionLogger.log(level: .info, message: "RecordingSession finished because the recording was canceled.")
-          onError(.capture(.recordingCanceled))
-          do {
-            VisionLogger.log(level: .info, message: "Deleting temporary video file...")
-            try FileManager.default.removeItem(at: recordingSession.url)
-          } catch {
-            self.delegate?.onError(.capture(.fileError(cause: error)))
+          self.recordingSession = nil
+          self.recordingSizeTimer?.cancel()
+          self.recordingSizeTimer = nil
+
+          if self.didCancelRecording {
+            VisionLogger.log(level: .info, message: "RecordingSession finished because the recording was canceled.")
+            onError(.capture(.recordingCanceled))
+            do {
+              VisionLogger.log(level: .info, message: "Deleting temporary video file...")
+              try FileManager.default.removeItem(at: recordingSession.url)
+            } catch {
+              self.delegate?.onError(.capture(.fileError(cause: error)))
+            }
+            return
           }
-          return
-        }
 
-        VisionLogger.log(level: .info, message: "RecordingSession finished with status \(status.descriptor).")
+          VisionLogger.log(level: .info, message: "RecordingSession finished with status \(status.descriptor).")
 
-        if let error = error as NSError? {
-          VisionLogger.log(level: .error, message: "RecordingSession Error \(error.code): \(error.description)")
-          // Something went wrong, we have an error
-          if error.code == INSUFFICIENT_STORAGE_ERROR_CODE {
-            onError(.capture(.insufficientStorage))
-          } else {
-            onError(.capture(.unknown(message: "An unknown recording error occured! \(error.code) \(error.description)")))
-          }
-        } else {
-          if status == .completed {
-            // Recording was successfully saved
-            let video = Video(path: recordingSession.url.absoluteString,
-                              duration: recordingSession.duration,
-                              size: recordingSession.size)
-            onVideoRecorded(video)
+          if let error = error as NSError? {
+            VisionLogger.log(level: .error, message: "RecordingSession Error \(error.code): \(error.description)")
+            if error.code == INSUFFICIENT_STORAGE_ERROR_CODE {
+              onError(.capture(.insufficientStorage))
+            } else {
+              onError(.capture(.unknown(message: "An unknown recording error occurred! \(error.code) \(error.description)")))
+            }
           } else {
-            // Recording wasn't saved and we don't have an error either.
-            onError(.unknown(message: "AVAssetWriter completed with status: \(status.descriptor)"))
+            if status == .completed {
+              let video = Video(path: recordingSession.url.absoluteString,
+                                duration: recordingSession.duration,
+                                size: recordingSession.size)
+              onVideoRecorded(video)
+            } else {
+              onError(.unknown(message: "AVAssetWriter completed with status: \(status.descriptor)"))
+            }
          }
         }
       }
Using Bayer format: " + + "\(getPixelFormatType(format).fourCCString) - recording will likely fail") + } + } else { throw CameraError.format(.invalidFormat) } // Set new device Format device.activeFormat = format - VisionLogger.log(level: .info, message: "Successfully configured Format!") + VisionLogger.log(level: .info, + message: "Successfully configured Format (mediaSubType: " + + "\(getPixelFormatType(format).fourCCString), " + + "dimensions: \(format.videoDimensions.width)x\(format.videoDimensions.height))") } func configureVideoOutputFormat(configuration: CameraConfiguration) { diff --git a/package/ios/Core/CameraSession+Video.swift b/package/ios/Core/CameraSession+Video.swift index 77839f6332..24c7dd8077 100644 --- a/package/ios/Core/CameraSession+Video.swift +++ b/package/ios/Core/CameraSession+Video.swift @@ -12,6 +12,15 @@ import UIKit private let INSUFFICIENT_STORAGE_ERROR_CODE = -11807 +// Bayer/ProRes RAW formats that cannot be encoded directly +private let kBayerFormats: Set = [ + 0x6274_7032, // btp2 - Bayer To ProRes 2 + 0x6274_7033, // btp3 - Bayer To ProRes 3 + 0x6270_3136, // bp16 - 16-bit Bayer + 0x6270_3234, // bp24 - 24-bit Bayer + 0x6270_3332, // bp32 - 32-bit Bayer +] + extension CameraSession { /** Starts a video + audio recording with a custom Asset Writer. @@ -39,51 +48,49 @@ extension CameraSession { // Callback for when the recording ends let onFinish = { (recordingSession: RecordingSession, status: AVAssetWriter.Status, error: Error?) in - defer { - // Disable Audio Session again - if enableAudio { - CameraQueues.audioQueue.async { - self.deactivateAudioSession() + CameraQueues.cameraQueue.async { + defer { + if enableAudio { + CameraQueues.audioQueue.async { + self.deactivateAudioSession() + } } } - } - self.recordingSession = nil - self.recordingSizeTimer?.cancel() - self.recordingSizeTimer = nil - - if self.didCancelRecording { - VisionLogger.log(level: .info, message: "RecordingSession finished because the recording was canceled.") - onError(.capture(.recordingCanceled)) - do { - VisionLogger.log(level: .info, message: "Deleting temporary video file...") - try FileManager.default.removeItem(at: recordingSession.url) - } catch { - self.delegate?.onError(.capture(.fileError(cause: error))) + self.recordingSession = nil + self.recordingSizeTimer?.cancel() + self.recordingSizeTimer = nil + + if self.didCancelRecording { + VisionLogger.log(level: .info, message: "RecordingSession finished because the recording was canceled.") + onError(.capture(.recordingCanceled)) + do { + VisionLogger.log(level: .info, message: "Deleting temporary video file...") + try FileManager.default.removeItem(at: recordingSession.url) + } catch { + self.delegate?.onError(.capture(.fileError(cause: error))) + } + return } - return - } - VisionLogger.log(level: .info, message: "RecordingSession finished with status \(status.descriptor).") + VisionLogger.log(level: .info, message: "RecordingSession finished with status \(status.descriptor).") - if let error = error as NSError? { - VisionLogger.log(level: .error, message: "RecordingSession Error \(error.code): \(error.description)") - // Something went wrong, we have an error - if error.code == INSUFFICIENT_STORAGE_ERROR_CODE { - onError(.capture(.insufficientStorage)) - } else { - onError(.capture(.unknown(message: "An unknown recording error occured! 
diff --git a/package/ios/Core/Extensions/AVCaptureVideoDataOutput+recommendedVideoSettings.swift b/package/ios/Core/Extensions/AVCaptureVideoDataOutput+recommendedVideoSettings.swift
index f985f2c737..9a1e83a6ef 100644
--- a/package/ios/Core/Extensions/AVCaptureVideoDataOutput+recommendedVideoSettings.swift
+++ b/package/ios/Core/Extensions/AVCaptureVideoDataOutput+recommendedVideoSettings.swift
@@ -8,6 +8,7 @@
 
 import AVFoundation
 import Foundation
+import VideoToolbox
 
 extension AVCaptureVideoDataOutput {
   private func supportsCodec(_ videoCodec: AVVideoCodecType, writingTo fileType: AVFileType) -> Bool {
@@ -15,31 +16,138 @@ extension AVCaptureVideoDataOutput {
     return availableCodecs.contains(videoCodec)
   }
 
+  /// Returns true if the given settings dictionary uses a non-standard codec
+  /// (anything other than H.264 or HEVC, e.g. ProRes, ProRes RAW).
+  private func isNonStandardCodec(in settings: [String: Any]?) -> Bool {
+    guard let codecValue = settings?[AVVideoCodecKey] as? String else { return false }
+    return codecValue != AVVideoCodecType.h264.rawValue && codecValue != AVVideoCodecType.hevc.rawValue
+  }
+
+  /// Returns true if the pixel format is a known 8-bit format (safe for HEVC Main profile)
+  private func isConfirmed8BitPixelFormat(_ pixelFormat: OSType?) -> Bool {
+    guard let pf = pixelFormat else { return false }
+    return [
+      kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, // 420v
+      kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, // 420f
+      kCVPixelFormatType_32BGRA, // BGRA
+      kCVPixelFormatType_32ARGB, // ARGB
+      kCVPixelFormatType_32RGBA, // RGBA
+    ].contains(pf)
+  }
+
   /**
    Get the recommended options for an [AVAssetWriter] with the desired [RecordVideoOptions].
+
+   - Parameter options: The recording options (codec, file type, bitrate, etc.)
+   - Parameter devicePixelFormat: The actual pixel format from the device's active format.
+     This is critical for correct HEVC profile selection on devices with 10-bit sensors (iPhone 14 Pro+).
+   - Parameter forceH264: If true, force H.264 codec regardless of other settings.
    */
-  func recommendedVideoSettings(forOptions options: RecordVideoOptions) throws -> [String: Any] {
-    let settings: [String: Any]?
-    VisionLogger.log(level: .info, message: "Getting recommended video settings for \(options.fileType) file...")
-    if let videoCodec = options.codec {
-      // User passed a custom codec
-      if supportsCodec(videoCodec, writingTo: options.fileType) {
-        // The codec is supported, use it
-        VisionLogger.log(level: .info, message: "Using codec \(videoCodec)...")
-        settings = recommendedVideoSettings(forVideoCodecType: videoCodec, assetWriterOutputFileType: options.fileType)
-      } else {
-        // The codec is not supported, fall-back to default
-        VisionLogger.log(level: .info, message: "Codec \(videoCodec) is not supported, falling back to default...")
+  func recommendedVideoSettings(forOptions options: RecordVideoOptions,
+                                devicePixelFormat: OSType? = nil,
+                                forceH264: Bool = false) throws -> [String: Any] {
+    var settings: [String: Any]?
+    let isConfirmed8Bit = isConfirmed8BitPixelFormat(devicePixelFormat)
+
+    VisionLogger.log(level: .info,
+                     message: "Getting recommended video settings for \(options.fileType) file " +
+                       "(codec: \(options.codec?.rawValue ?? "default"), forceH264: \(forceH264))...")
+    VisionLogger.log(level: .info,
+                     message: "Device pixel format: \(devicePixelFormat.map { String(format: "0x%08X", $0) } ?? "nil"), " +
+                       "isConfirmed8Bit: \(isConfirmed8Bit)")
+
+    // If forceH264 is set, skip all other codecs and go straight to H.264
+    if forceH264 {
+      VisionLogger.log(level: .warning, message: "Forcing H.264 codec for maximum compatibility...")
+      if supportsCodec(.h264, writingTo: options.fileType) {
+        settings = recommendedVideoSettings(forVideoCodecType: .h264, assetWriterOutputFileType: options.fileType)
+        if let codecValue = settings?[AVVideoCodecKey] as? String, codecValue != AVVideoCodecType.h264.rawValue {
+          VisionLogger.log(level: .warning,
+                           message: "recommendedVideoSettings returned \(codecValue) instead of H.264, overriding...")
+          settings?[AVVideoCodecKey] = AVVideoCodecType.h264.rawValue
+        }
+      }
+    }
+
+    // If not forcing H.264 or H.264 isn't available, try normal codec selection
+    if settings == nil {
+      let preferredCodec: AVVideoCodecType = options.codec ?? .hevc
+
+      // Strategy 1: Try the preferred codec
+      if supportsCodec(preferredCodec, writingTo: options.fileType) {
+        VisionLogger.log(level: .info, message: "Using preferred codec \(preferredCodec.rawValue)...")
+        settings = recommendedVideoSettings(forVideoCodecType: preferredCodec, assetWriterOutputFileType: options.fileType)
+      }
+
+      // Strategy 2: If preferred codec returned ProRes settings or wasn't supported, try HEVC
+      if settings == nil || isNonStandardCodec(in: settings) {
+        if preferredCodec != .hevc && supportsCodec(.hevc, writingTo: options.fileType) {
+          VisionLogger.log(level: .warning, message: "Preferred codec returned non-standard settings. Trying HEVC...")
+          settings = recommendedVideoSettings(forVideoCodecType: .hevc, assetWriterOutputFileType: options.fileType)
+        }
+      }
+
+      // Strategy 3: Try H.264
+      if settings == nil || isNonStandardCodec(in: settings) {
+        if supportsCodec(.h264, writingTo: options.fileType) {
+          VisionLogger.log(level: .warning, message: "HEVC returned non-standard settings. Trying H.264...")
+          settings = recommendedVideoSettings(forVideoCodecType: .h264, assetWriterOutputFileType: options.fileType)
+        }
+      }
+
+      // Strategy 4: Default recommended settings
+      if settings == nil {
+        VisionLogger.log(level: .warning, message: "No codec-specific settings available. Using default...")
         settings = recommendedVideoSettingsForAssetWriter(writingTo: options.fileType)
       }
-    } else {
-      // User didn't pass a custom codec, just use default
-      settings = recommendedVideoSettingsForAssetWriter(writingTo: options.fileType)
     }
+
     guard var settings else {
       throw CameraError.capture(.createRecorderError(message: "Failed to get video settings!"))
     }
 
+    // Safety check: On devices like iPhone 14 Pro+ and iPhone 16/17, even codec-specific
+    // recommended settings may return ProRes/ProRes RAW codec settings that require
+    // SMPTE RDD18 metadata (ISO sensitivity, white balance, etc.) which this library
+    // does not provide. Force-override any non-standard codec to a compatible one.
+    if isNonStandardCodec(in: settings) {
+      let originalCodec = settings[AVVideoCodecKey] as? String ?? "nil"
+      let fallbackCodec: AVVideoCodecType = (forceH264 || isConfirmed8Bit) ? .h264 : .hevc
+      VisionLogger.log(level: .warning,
+                       message: "Overriding non-standard codec \(originalCodec) → \(fallbackCodec.rawValue)")
+      settings[AVVideoCodecKey] = fallbackCodec.rawValue
+
+      // Strip any ProRes-specific compression properties that may be incompatible
+      if var compressionProps = settings[AVVideoCompressionPropertiesKey] as? [String: Any] {
+        let standardKeys: Set<String> = [
+          AVVideoAverageBitRateKey,
+          AVVideoExpectedSourceFrameRateKey,
+          AVVideoMaxKeyFrameIntervalKey,
+          AVVideoMaxKeyFrameIntervalDurationKey,
+          AVVideoProfileLevelKey,
+          AVVideoQualityKey,
+        ]
+        compressionProps = compressionProps.filter { standardKeys.contains($0.key) }
+        settings[AVVideoCompressionPropertiesKey] = compressionProps
+      }
+    }
+
+    // Set correct profile based on codec
+    let codecKey = settings[AVVideoCodecKey] as? String
+    if codecKey == AVVideoCodecType.hevc.rawValue {
+      var compressionProps = settings[AVVideoCompressionPropertiesKey] as? [String: Any] ?? [:]
+      // Use Main10 profile for HEVC - it can encode both 8-bit and 10-bit content
+      // with no quality loss. This eliminates -12905 errors entirely.
+      VisionLogger.log(level: .info, message: "Setting HEVC Main10 profile for universal compatibility")
+      compressionProps[AVVideoProfileLevelKey] = kVTProfileLevel_HEVC_Main10_AutoLevel as String
+      settings[AVVideoCompressionPropertiesKey] = compressionProps
+    } else if codecKey == AVVideoCodecType.h264.rawValue {
+      var compressionProps = settings[AVVideoCompressionPropertiesKey] as? [String: Any] ?? [:]
+      VisionLogger.log(level: .info, message: "Setting H.264 High profile for best compatibility")
+      compressionProps[AVVideoProfileLevelKey] = AVVideoProfileLevelH264HighAutoLevel
+      settings[AVVideoCompressionPropertiesKey] = compressionProps
+    }
+
     if let bitRateOverride = options.bitRateOverride {
       // Convert from Mbps -> bps
       let bitsPerSecond = bitRateOverride * 1_000_000
@@ -48,7 +156,9 @@ extension AVCaptureVideoDataOutput {
       }
       var compressionSettings = settings[AVVideoCompressionPropertiesKey] as? [String: Any] ?? [:]
       let currentBitRate = compressionSettings[AVVideoAverageBitRateKey] as? NSNumber
-      VisionLogger.log(level: .info, message: "Setting Video Bit-Rate from \(currentBitRate?.doubleValue.description ?? "nil") bps to \(bitsPerSecond) bps...")
+      VisionLogger.log(level: .info,
+                       message: "Setting Video Bit-Rate from \(currentBitRate?.doubleValue.description ?? "nil") bps " +
+                         "to \(bitsPerSecond) bps...")
       compressionSettings[AVVideoAverageBitRateKey] = NSNumber(value: bitsPerSecond)
       settings[AVVideoCompressionPropertiesKey] = compressionSettings
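To make the force-override concrete: for a settings dictionary that comes back with a ProRes/ProRes RAW codec, the safety check above effectively rewrites it into an HEVC Main10 dictionary. A self-contained sketch of that rewrite (an illustration of the shape of the change, not the library's API):

import AVFoundation
import VideoToolbox

// Coerces arbitrary asset-writer video settings to HEVC with the Main10 profile,
// keeping any compression properties that are already present.
func coercedToHEVCMain10(_ original: [String: Any]) -> [String: Any] {
  var settings = original
  settings[AVVideoCodecKey] = AVVideoCodecType.hevc.rawValue
  var props = settings[AVVideoCompressionPropertiesKey] as? [String: Any] ?? [:]
  props[AVVideoProfileLevelKey] = kVTProfileLevel_HEVC_Main10_AutoLevel as String
  settings[AVVideoCompressionPropertiesKey] = props
  return settings
}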
diff --git a/package/ios/Core/Extensions/FourCharCode+toString.swift b/package/ios/Core/Extensions/FourCharCode+toString.swift
index 0e9b69d8ca..61964d9c5f 100644
--- a/package/ios/Core/Extensions/FourCharCode+toString.swift
+++ b/package/ios/Core/Extensions/FourCharCode+toString.swift
@@ -14,4 +14,19 @@ extension FourCharCode {
     s.append(String(UnicodeScalar(self & 255)!))
     return s
   }
+
+  /// Convenience property for FourCC string representation
+  var fourCCString: String {
+    let bytes = [
+      UInt8((self >> 24) & 0xFF),
+      UInt8((self >> 16) & 0xFF),
+      UInt8((self >> 8) & 0xFF),
+      UInt8(self & 0xFF),
+    ]
+    let isPrintable = bytes.allSatisfy { $0 >= 32 && $0 < 127 }
+    if isPrintable, let str = String(bytes: bytes, encoding: .ascii) {
+      return str
+    }
+    return String(format: "0x%08X", self)
+  }
 }
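Example of the new fourCCString helper in use (assuming the extension above is compiled in): printable codes render as ASCII, everything else falls back to hex.

import CoreVideo

let bgra: FourCharCode = kCVPixelFormatType_32BGRA
print(bgra.fourCCString)                      // "BGRA"
print(FourCharCode(0x0000_0001).fourCCString) // "0x00000001" (non-printable bytes)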
diff --git a/package/ios/Core/Recording/Track.swift b/package/ios/Core/Recording/Track.swift
index 25c51dc346..419a869a56 100644
--- a/package/ios/Core/Recording/Track.swift
+++ b/package/ios/Core/Recording/Track.swift
@@ -84,6 +84,18 @@ final class Track {
     timeline.resume()
   }
 
+  /**
+   Force-marks the track input as finished if it hasn't already been naturally finished
+   through the timeline. This is needed on devices (e.g. iPhone 14 Pro+, iPhone 16/17) where
+   the camera hardware stops delivering frames immediately after stop(), so no "late" frame
+   arrives to trigger the natural finish flow via TrackTimeline.
+   */
+  func ensureFinished() {
+    guard !timeline.isFinished else { return }
+    VisionLogger.log(level: .info, message: "Force-finishing \(type) track input (no late frames arrived after stop).")
+    assetWriterInput.markAsFinished()
+  }
+
   func append(buffer originalBuffer: CMSampleBuffer) throws {
     // 1. If the track is already finished (from a previous call), don't write anything.
     if timeline.isFinished {
+ VisionLogger.log(level: .info, message: "Setting HEVC Main10 profile for universal compatibility") + compressionProps[AVVideoProfileLevelKey] = kVTProfileLevel_HEVC_Main10_AutoLevel as String + settings[AVVideoCompressionPropertiesKey] = compressionProps + } else if codecKey == AVVideoCodecType.h264.rawValue { + var compressionProps = settings[AVVideoCompressionPropertiesKey] as? [String: Any] ?? [:] + VisionLogger.log(level: .info, message: "Setting H.264 High profile for best compatibility") + compressionProps[AVVideoProfileLevelKey] = AVVideoProfileLevelH264HighAutoLevel + settings[AVVideoCompressionPropertiesKey] = compressionProps + } + if let bitRateOverride = options.bitRateOverride { // Convert from Mbps -> bps let bitsPerSecond = bitRateOverride * 1_000_000 @@ -48,7 +156,9 @@ extension AVCaptureVideoDataOutput { } var compressionSettings = settings[AVVideoCompressionPropertiesKey] as? [String: Any] ?? [:] let currentBitRate = compressionSettings[AVVideoAverageBitRateKey] as? NSNumber - VisionLogger.log(level: .info, message: "Setting Video Bit-Rate from \(currentBitRate?.doubleValue.description ?? "nil") bps to \(bitsPerSecond) bps...") + VisionLogger.log(level: .info, + message: "Setting Video Bit-Rate from \(currentBitRate?.doubleValue.description ?? "nil") bps " + + "to \(bitsPerSecond) bps...") compressionSettings[AVVideoAverageBitRateKey] = NSNumber(value: bitsPerSecond) settings[AVVideoCompressionPropertiesKey] = compressionSettings diff --git a/package/ios/Core/Extensions/FourCharCode+toString.swift b/package/ios/Core/Extensions/FourCharCode+toString.swift index 0e9b69d8ca..61964d9c5f 100644 --- a/package/ios/Core/Extensions/FourCharCode+toString.swift +++ b/package/ios/Core/Extensions/FourCharCode+toString.swift @@ -14,4 +14,19 @@ extension FourCharCode { s.append(String(UnicodeScalar(self & 255)!)) return s } + + /// Convenience property for FourCC string representation + var fourCCString: String { + let bytes = [ + UInt8((self >> 24) & 0xFF), + UInt8((self >> 16) & 0xFF), + UInt8((self >> 8) & 0xFF), + UInt8(self & 0xFF), + ] + let isPrintable = bytes.allSatisfy { $0 >= 32 && $0 < 127 } + if isPrintable, let str = String(bytes: bytes, encoding: .ascii) { + return str + } + return String(format: "0x%08X", self) + } } diff --git a/package/ios/Core/Recording/Track.swift b/package/ios/Core/Recording/Track.swift index 25c51dc346..419a869a56 100644 --- a/package/ios/Core/Recording/Track.swift +++ b/package/ios/Core/Recording/Track.swift @@ -84,6 +84,18 @@ final class Track { timeline.resume() } + /** + Force-marks the track input as finished if it hasn't already been naturally finished + through the timeline. This is needed on devices (e.g. iPhone 14 Pro+, iPhone 16/17) where + the camera hardware stops delivering frames immediately after stop(), so no "late" frame + arrives to trigger the natural finish flow via TrackTimeline. + */ + func ensureFinished() { + guard !timeline.isFinished else { return } + VisionLogger.log(level: .info, message: "Force-finishing \(type) track input (no late frames arrived after stop).") + assetWriterInput.markAsFinished() + } + func append(buffer originalBuffer: CMSampleBuffer) throws { // 1. If the track is already finished (from a previous call), don't write anything. 
+    videoTrack.ensureFinished()
+    audioTrack?.ensureFinished()
+
     assetWriter.finishWriting {
       VisionLogger.log(level: .info, message: "Asset Writer finished writing!")
       self.completionHandler(self, self.assetWriter.status, self.assetWriter.error)
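Context for the ensureFinished() calls: AVAssetWriter.finishWriting(completionHandler:) expects every attached input to have been marked as finished, which is why the comment above warns about finishWriting() hanging otherwise. A minimal standalone sketch of that contract (plain AVFoundation, hypothetical function name):

import AVFoundation

// Force-finish every input, then finish the writer, mirroring what RecordingSession
// now does through Track.ensureFinished() before calling finishWriting.
func finishWriterSafely(_ writer: AVAssetWriter, completion: @escaping () -> Void) {
  for input in writer.inputs {
    input.markAsFinished()
  }
  writer.finishWriting(completionHandler: completion)
}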