diff --git a/docs/docs/guides/RECORDING_VIDEOS.mdx b/docs/docs/guides/RECORDING_VIDEOS.mdx index 99a5dc2f5d..3cd6ed19c2 100644 --- a/docs/docs/guides/RECORDING_VIDEOS.mdx +++ b/docs/docs/guides/RECORDING_VIDEOS.mdx @@ -42,6 +42,7 @@ To start a video recording you first have to enable video capture: {...props} video={true} audio={true} // <-- optional + onBytesWrittenVideo={(bytes) => {/* called in real time while recording with the total bytes written so far */}} // <-- optional /> ``` diff --git a/example/src/CameraPage.tsx b/example/src/CameraPage.tsx index af65e71462..1a66536817 100644 --- a/example/src/CameraPage.tsx +++ b/example/src/CameraPage.tsx @@ -205,6 +205,7 @@ export function CameraPage({ navigation }: Props): React.ReactElement { ref={camera} onInitialized={onInitialized} onError={onError} + onBytesWrittenVideo={(bytes) => console.log(`Bytes written: ${bytes / 1024 / 1024} MB!`)} onStarted={() => console.log('Camera started!')} onStopped={() => console.log('Camera stopped!')} onPreviewStarted={() => console.log('Preview started!')} diff --git a/package/android/src/main/cpp/frameprocessors/FrameHostObject.cpp b/package/android/src/main/cpp/frameprocessors/FrameHostObject.cpp index 09970a9a6a..cf229ee5fb 100644 --- a/package/android/src/main/cpp/frameprocessors/FrameHostObject.cpp +++ b/package/android/src/main/cpp/frameprocessors/FrameHostObject.cpp @@ -55,7 +55,7 @@ std::vector FrameHostObject::getPropertyNames(jsi::Runtime& rt) return result; } -#define JSI_FUNC [=](jsi::Runtime & runtime, const jsi::Value& thisValue, const jsi::Value* arguments, size_t count) -> jsi::Value +#define JSI_FUNC [=](jsi::Runtime & runtime, const jsi::Value& thisValue, const jsi::Value* arguments, size_t count)->jsi::Value jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) { auto name = propName.utf8(runtime); diff --git a/package/android/src/main/java/com/mrousavy/camera/core/CameraSession+Video.kt 
b/package/android/src/main/java/com/mrousavy/camera/core/CameraSession+Video.kt index fcc54b8e95..eea1c385fd 100644 --- a/package/android/src/main/java/com/mrousavy/camera/core/CameraSession+Video.kt +++ b/package/android/src/main/java/com/mrousavy/camera/core/CameraSession+Video.kt @@ -17,7 +17,8 @@ fun CameraSession.startRecording( enableAudio: Boolean, options: RecordVideoOptions, callback: (video: Video) -> Unit, - onError: (error: CameraError) -> Unit + onError: (error: CameraError) -> Unit, + onBytesWrittenCallback: (bytes: Long) -> Unit ) { if (camera == null) throw CameraNotReadyError() if (recording != null) throw RecordingInProgressError() @@ -49,7 +50,10 @@ fun CameraSession.startRecording( is VideoRecordEvent.Pause -> Log.i(CameraSession.TAG, "Recording paused!") - is VideoRecordEvent.Status -> Log.i(CameraSession.TAG, "Status update! Recorded ${event.recordingStats.numBytesRecorded} bytes.") + is VideoRecordEvent.Status -> { + Log.i(CameraSession.TAG, "Status update! Recorded ${event.recordingStats.numBytesRecorded} bytes.") + onBytesWrittenCallback(event.recordingStats.numBytesRecorded) + } is VideoRecordEvent.Finalize -> { if (isRecordingCanceled) { diff --git a/package/android/src/main/java/com/mrousavy/camera/core/CameraSession.kt b/package/android/src/main/java/com/mrousavy/camera/core/CameraSession.kt index 13f6a763c1..eadbf10cc9 100644 --- a/package/android/src/main/java/com/mrousavy/camera/core/CameraSession.kt +++ b/package/android/src/main/java/com/mrousavy/camera/core/CameraSession.kt @@ -221,5 +221,6 @@ class CameraSession(internal val context: Context, internal val callback: Callba fun onOutputOrientationChanged(outputOrientation: Orientation) fun onPreviewOrientationChanged(previewOrientation: Orientation) fun onCodeScanned(codes: List, scannerFrame: CodeScannerFrame) + fun onBytesWrittenVideo(bytesWritten: Double) } } diff --git a/package/android/src/main/java/com/mrousavy/camera/react/CameraView+Events.kt 
b/package/android/src/main/java/com/mrousavy/camera/react/CameraView+Events.kt index aacb5c7079..1a491d0b9b 100644 --- a/package/android/src/main/java/com/mrousavy/camera/react/CameraView+Events.kt +++ b/package/android/src/main/java/com/mrousavy/camera/react/CameraView+Events.kt @@ -125,6 +125,17 @@ fun CameraView.invokeOnAverageFpsChanged(averageFps: Double) { this.sendEvent(event) } +fun CameraView.invokeOnBytesWrittenVideo(bytesWritten: Double) { + Log.i(CameraView.TAG, "invokeOnBytesWrittenVideo($bytesWritten)") + + val surfaceId = UIManagerHelper.getSurfaceId(this) + val data = Arguments.createMap() + data.putDouble("bytesWritten", bytesWritten) + + val event = BytesWrittenVideoEvent(surfaceId, id, data) + this.sendEvent(event) +} + fun CameraView.invokeOnCodeScanned(barcodes: List, scannerFrame: CodeScannerFrame) { val codes = Arguments.createArray() barcodes.forEach { barcode -> diff --git a/package/android/src/main/java/com/mrousavy/camera/react/CameraView+RecordVideo.kt b/package/android/src/main/java/com/mrousavy/camera/react/CameraView+RecordVideo.kt index e2e16e4716..f65f42b18a 100644 --- a/package/android/src/main/java/com/mrousavy/camera/react/CameraView+RecordVideo.kt +++ b/package/android/src/main/java/com/mrousavy/camera/react/CameraView+RecordVideo.kt @@ -17,7 +17,9 @@ import com.mrousavy.camera.core.types.Video import com.mrousavy.camera.react.utils.makeErrorMap fun CameraView.startRecording(options: RecordVideoOptions, onRecordCallback: Callback) { - // check audio permission + val onBytesWrittenCallback = { bytes: Long -> + this.onBytesWrittenVideo(bytes.toDouble()) + } if (audio) { if (ContextCompat.checkSelfPermission(context, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) { throw MicrophonePermissionError() @@ -36,7 +38,7 @@ fun CameraView.startRecording(options: RecordVideoOptions, onRecordCallback: Cal val errorMap = makeErrorMap(error.code, error.message) onRecordCallback(null, errorMap) } - 
cameraSession.startRecording(audio, options, callback, onError) + cameraSession.startRecording(audio, options, callback, onError, onBytesWrittenCallback) } fun CameraView.pauseRecording() { diff --git a/package/android/src/main/java/com/mrousavy/camera/react/CameraView.kt b/package/android/src/main/java/com/mrousavy/camera/react/CameraView.kt index 64a751eaab..7b3fbebab4 100644 --- a/package/android/src/main/java/com/mrousavy/camera/react/CameraView.kt +++ b/package/android/src/main/java/com/mrousavy/camera/react/CameraView.kt @@ -350,4 +350,8 @@ class CameraView(context: Context) : override fun onAverageFpsChanged(averageFps: Double) { invokeOnAverageFpsChanged(averageFps) } + + override fun onBytesWrittenVideo(bytesWritten: Double) { + invokeOnBytesWrittenVideo(bytesWritten) + } } diff --git a/package/android/src/main/java/com/mrousavy/camera/react/CameraViewManager.kt b/package/android/src/main/java/com/mrousavy/camera/react/CameraViewManager.kt index c5845b727f..ed2a42a3ae 100644 --- a/package/android/src/main/java/com/mrousavy/camera/react/CameraViewManager.kt +++ b/package/android/src/main/java/com/mrousavy/camera/react/CameraViewManager.kt @@ -41,6 +41,7 @@ class CameraViewManager : ViewGroupManager() { .put(CameraOutputOrientationChangedEvent.EVENT_NAME, MapBuilder.of("registrationName", "onOutputOrientationChanged")) .put(CameraPreviewOrientationChangedEvent.EVENT_NAME, MapBuilder.of("registrationName", "onPreviewOrientationChanged")) .put(AverageFpsChangedEvent.EVENT_NAME, MapBuilder.of("registrationName", "onAverageFpsChanged")) + .put(BytesWrittenVideoEvent.EVENT_NAME, MapBuilder.of("registrationName", "onBytesWrittenVideo")) .build() override fun getName(): String = TAG diff --git a/package/android/src/main/java/com/mrousavy/camera/react/Events.kt b/package/android/src/main/java/com/mrousavy/camera/react/Events.kt index acbb77aa92..3bdafe594c 100644 --- a/package/android/src/main/java/com/mrousavy/camera/react/Events.kt +++ 
b/package/android/src/main/java/com/mrousavy/camera/react/Events.kt @@ -102,3 +102,11 @@ class CameraCodeScannedEvent(surfaceId: Int, viewId: Int, private val data: Writ const val EVENT_NAME = "topCameraCodeScanned" } } +class BytesWrittenVideoEvent(surfaceId: Int, viewId: Int, private val data: WritableMap) : + Event(surfaceId, viewId) { + override fun getEventName() = EVENT_NAME + override fun getEventData() = data + companion object { + const val EVENT_NAME = "bytesWrittenVideoEvent" + } +} diff --git a/package/ios/Core/CameraConfiguration.swift b/package/ios/Core/CameraConfiguration.swift index a4b94dd377..58d6f6080b 100644 --- a/package/ios/Core/CameraConfiguration.swift +++ b/package/ios/Core/CameraConfiguration.swift @@ -153,7 +153,7 @@ final class CameraConfiguration { case disabled case enabled(config: T) - public static func == (lhs: OutputConfiguration, rhs: OutputConfiguration) -> Bool { + static func == (lhs: OutputConfiguration, rhs: OutputConfiguration) -> Bool { switch (lhs, rhs) { case (.disabled, .disabled): return true diff --git a/package/ios/Core/CameraSession+Video.swift b/package/ios/Core/CameraSession+Video.swift index 8e57710f24..77839f6332 100644 --- a/package/ios/Core/CameraSession+Video.swift +++ b/package/ios/Core/CameraSession+Video.swift @@ -18,7 +18,8 @@ extension CameraSession { */ func startRecording(options: RecordVideoOptions, onVideoRecorded: @escaping (_ video: Video) -> Void, - onError: @escaping (_ error: CameraError) -> Void) { + onError: @escaping (_ error: CameraError) -> Void, + onBytesWritten: @escaping (_ bytes: Double) -> Void) { // Run on Camera Queue CameraQueues.cameraQueue.async { let start = DispatchTime.now() @@ -48,6 +49,8 @@ extension CameraSession { } self.recordingSession = nil + self.recordingSizeTimer?.cancel() + self.recordingSizeTimer = nil if self.didCancelRecording { VisionLogger.log(level: .info, message: "RecordingSession finished because the recording was canceled.") @@ -128,6 +131,23 @@ extension 
CameraSession { self.didCancelRecording = false self.recordingSession = recordingSession + let timer = DispatchSource.makeTimerSource(queue: CameraQueues.cameraQueue) + timer.schedule(deadline: .now(), repeating: 0.4) + + timer.setEventHandler { + guard let session = self.recordingSession else { + timer.cancel() + return + } + + let path = session.url.path + if let size = try? FileManager.default.attributesOfItem(atPath: path)[.size] as? NSNumber { + let bytes = size.doubleValue + onBytesWritten(bytes) + } + } + self.recordingSizeTimer = timer + self.recordingSizeTimer?.resume() let end = DispatchTime.now() VisionLogger.log(level: .info, message: "RecordingSesssion started in \(Double(end.uptimeNanoseconds - start.uptimeNanoseconds) / 1_000_000)ms!") } catch let error as CameraError { diff --git a/package/ios/Core/CameraSession.swift b/package/ios/Core/CameraSession.swift index 10b0f3399c..b02eb44063 100644 --- a/package/ios/Core/CameraSession.swift +++ b/package/ios/Core/CameraSession.swift @@ -31,6 +31,7 @@ final class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegat // State var metadataProvider = MetadataProvider() var recordingSession: RecordingSession? + var recordingSizeTimer: DispatchSourceTimer? 
var didCancelRecording = false var orientationManager = OrientationManager() @@ -265,7 +266,7 @@ final class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegat } } - public final func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { + final func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { switch captureOutput { case is AVCaptureVideoDataOutput: onVideoFrame(sampleBuffer: sampleBuffer, orientation: connection.orientation, isMirrored: connection.isVideoMirrored) diff --git a/package/ios/Core/PreviewView.swift b/package/ios/Core/PreviewView.swift index 9c6ff25cce..81a8d7cba0 100644 --- a/package/ios/Core/PreviewView.swift +++ b/package/ios/Core/PreviewView.swift @@ -48,7 +48,7 @@ final class PreviewView: UIView { } } - override public static var layerClass: AnyClass { + override static var layerClass: AnyClass { return AVCaptureVideoPreviewLayer.self } diff --git a/package/ios/Core/Recording/Track.swift b/package/ios/Core/Recording/Track.swift index 8dec5ce89a..25c51dc346 100644 --- a/package/ios/Core/Recording/Track.swift +++ b/package/ios/Core/Recording/Track.swift @@ -51,7 +51,7 @@ final class Track { /** Returns the last timestamp that was actually written to the track. */ - public private(set) var lastTimestamp: CMTime? + private(set) var lastTimestamp: CMTime? /** Gets the natural size of the asset writer, or zero if it is not a visual track. diff --git a/package/ios/Core/Recording/TrackTimeline.swift b/package/ios/Core/Recording/TrackTimeline.swift index 345322fdd6..5e33e6af9e 100644 --- a/package/ios/Core/Recording/TrackTimeline.swift +++ b/package/ios/Core/Recording/TrackTimeline.swift @@ -25,22 +25,22 @@ final class TrackTimeline { Represents whether the timeline has been marked as finished or not. 
A timeline will automatically be marked as finished when a timestamp arrives that appears after a stop(). */ - public private(set) var isFinished = false + private(set) var isFinished = false /** Gets the latency of the buffers in this timeline. This is computed by (currentTime - mostRecentBuffer.timestamp) */ - public private(set) var latency: CMTime = .zero + private(set) var latency: CMTime = .zero /** Get the first actually written timestamp of this timeline */ - public private(set) var firstTimestamp: CMTime? + private(set) var firstTimestamp: CMTime? /** Get the last actually written timestamp of this timeline. */ - public private(set) var lastTimestamp: CMTime? + private(set) var lastTimestamp: CMTime? init(ofTrackType type: TrackType, withClock clock: CMClock) { trackType = type diff --git a/package/ios/FrameProcessors/FrameHostObject.mm b/package/ios/FrameProcessors/FrameHostObject.mm index 67bbfe51df..94f7250447 100644 --- a/package/ios/FrameProcessors/FrameHostObject.mm +++ b/package/ios/FrameProcessors/FrameHostObject.mm @@ -40,7 +40,7 @@ return result; } -#define JSI_FUNC [=](jsi::Runtime & runtime, const jsi::Value& thisValue, const jsi::Value* arguments, size_t count) -> jsi::Value +#define JSI_FUNC [=](jsi::Runtime & runtime, const jsi::Value& thisValue, const jsi::Value* arguments, size_t count)->jsi::Value jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) { auto name = propName.utf8(runtime); diff --git a/package/ios/React/CameraView+RecordVideo.swift b/package/ios/React/CameraView+RecordVideo.swift index 913d420efd..ddd06fa42c 100644 --- a/package/ios/React/CameraView+RecordVideo.swift +++ b/package/ios/React/CameraView+RecordVideo.swift @@ -28,7 +28,8 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud }, onError: { error in callback.reject(error: error) - } + }, + onBytesWritten: onBytesWrittenVideo ) } catch { // Some error occured while initializing VideoSettings diff --git 
a/package/ios/React/CameraView.swift b/package/ios/React/CameraView.swift index c773975353..64282f295f 100644 --- a/package/ios/React/CameraView.swift +++ b/package/ios/React/CameraView.swift @@ -80,6 +80,7 @@ public final class CameraView: UIView, CameraSessionDelegate, PreviewViewDelegat @objc var onOutputOrientationChangedEvent: RCTDirectEventBlock? @objc var onViewReadyEvent: RCTDirectEventBlock? @objc var onAverageFpsChangedEvent: RCTDirectEventBlock? + @objc var onBytesWrittenVideoEvent: RCTDirectEventBlock? @objc var onCodeScannedEvent: RCTDirectEventBlock? // zoom @@ -392,4 +393,10 @@ public final class CameraView: UIView, CameraSessionDelegate, PreviewViewDelegat "averageFps": averageFps, ]) } + + func onBytesWrittenVideo(bytes: Double) { + onBytesWrittenVideoEvent?([ + "bytesWritten": bytes, + ]) + } } diff --git a/package/ios/React/CameraViewManager.m b/package/ios/React/CameraViewManager.m index 527c9bc0fd..4aacb1ba2a 100644 --- a/package/ios/React/CameraViewManager.m +++ b/package/ios/React/CameraViewManager.m @@ -68,6 +68,7 @@ @interface RCT_EXTERN_REMAP_MODULE (CameraView, CameraViewManager, RCTViewManage RCT_REMAP_VIEW_PROPERTY(onPreviewOrientationChanged, onPreviewOrientationChangedEvent, RCTDirectEventBlock); RCT_REMAP_VIEW_PROPERTY(onViewReady, onViewReadyEvent, RCTDirectEventBlock); RCT_REMAP_VIEW_PROPERTY(onAverageFpsChanged, onAverageFpsChangedEvent, RCTDirectEventBlock); +RCT_REMAP_VIEW_PROPERTY(onBytesWrittenVideo, onBytesWrittenVideoEvent, RCTDirectEventBlock); // Code Scanner RCT_EXPORT_VIEW_PROPERTY(codeScannerOptions, NSDictionary); RCT_REMAP_VIEW_PROPERTY(onCodeScanned, onCodeScannedEvent, RCTDirectEventBlock); diff --git a/package/ios/React/Utils/Promise.swift b/package/ios/React/Utils/Promise.swift index d1a7a132a2..946ea5cce1 100644 --- a/package/ios/React/Utils/Promise.swift +++ b/package/ios/React/Utils/Promise.swift @@ -14,7 +14,7 @@ import Foundation * Represents a JavaScript Promise instance. 
`reject()` and `resolve()` should only be called once. */ class Promise { - public private(set) var didResolve = false + private(set) var didResolve = false init(resolver: @escaping RCTPromiseResolveBlock, rejecter: @escaping RCTPromiseRejectBlock) { self.resolver = resolver diff --git a/package/src/Camera.tsx b/package/src/Camera.tsx index afe056a761..900242e5a9 100644 --- a/package/src/Camera.tsx +++ b/package/src/Camera.tsx @@ -7,7 +7,7 @@ import type { CameraProps, DrawableFrameProcessor, OnShutterEvent, ReadonlyFrame import { CameraModule } from './NativeCameraModule' import type { PhotoFile, TakePhotoOptions } from './types/PhotoFile' import type { Point } from './types/Point' -import type { RecordVideoOptions, VideoFile } from './types/VideoFile' +import type { OnBytesWrittenVideoEvent, RecordVideoOptions, VideoFile } from './types/VideoFile' import { VisionCameraProxy } from './frame-processors/VisionCameraProxy' import { CameraDevices } from './CameraDevices' import type { EmitterSubscription, NativeSyntheticEvent, NativeMethods } from 'react-native' @@ -102,6 +102,7 @@ export class Camera extends React.PureComponent { this.onPreviewOrientationChanged = this.onPreviewOrientationChanged.bind(this) this.onError = this.onError.bind(this) this.onCodeScanned = this.onCodeScanned.bind(this) + this.onBytesWrittenVideo = this.onBytesWrittenVideo.bind(this) this.ref = React.createRef() this.lastFrameProcessor = undefined this.state = { @@ -599,6 +600,10 @@ export class Camera extends React.PureComponent { }) } + private onBytesWrittenVideo({ nativeEvent: { bytesWritten } }: NativeSyntheticEvent): void { + this.props.onBytesWrittenVideo?.(bytesWritten) + } + /** @internal */ componentDidUpdate(): void { if (!this.isNativeViewMounted) return @@ -657,6 +662,7 @@ export class Camera extends React.PureComponent { isMirrored={props.isMirrored ?? shouldBeMirrored} onViewReady={this.onViewReady} onAverageFpsChanged={enableFpsGraph ? 
this.onAverageFpsChanged : undefined} + onBytesWrittenVideo={this.onBytesWrittenVideo} onInitialized={this.onInitialized} onCodeScanned={this.onCodeScanned} onStarted={this.onStarted} diff --git a/package/src/NativeCameraView.ts b/package/src/NativeCameraView.ts index b53009ce56..eb491b3112 100644 --- a/package/src/NativeCameraView.ts +++ b/package/src/NativeCameraView.ts @@ -4,6 +4,7 @@ import type { ErrorWithCause } from './CameraError' import type { CameraProps, OnShutterEvent } from './types/CameraProps' import type { Code, CodeScanner, CodeScannerFrame } from './types/CodeScanner' import type { Orientation } from './types/Orientation' +import type { OnBytesWrittenVideoEvent } from './types/VideoFile' export interface OnCodeScannedEvent { codes: Code[] @@ -35,6 +36,7 @@ export type NativeCameraViewProps = Omit< | 'codeScanner' | 'fps' | 'videoBitRate' + | 'onBytesWrittenVideo' > & { // private intermediate props cameraId: string @@ -58,6 +60,7 @@ export type NativeCameraViewProps = Omit< onShutter?: (event: NativeSyntheticEvent) => void onOutputOrientationChanged?: (event: NativeSyntheticEvent) => void onPreviewOrientationChanged?: (event: NativeSyntheticEvent) => void + onBytesWrittenVideo?: (event: NativeSyntheticEvent) => void } // requireNativeComponent automatically resolves 'CameraView' to 'CameraViewManager' diff --git a/package/src/types/CameraProps.ts b/package/src/types/CameraProps.ts index 18bfc9b618..36ff239dc7 100644 --- a/package/src/types/CameraProps.ts +++ b/package/src/types/CameraProps.ts @@ -416,5 +416,18 @@ export interface CameraProps extends ViewProps { * ``` */ codeScanner?: CodeScanner + /** + * Fires every few hundred milliseconds to notify how many + * total bytes are currently written to the video file. 
+ * @example + * ```tsx + * const onBytesWrittenVideo = (bytes: number) => { + * console.log(`Bytes written: ${bytes}`) + * } + * + * return + * ``` + */ + onBytesWrittenVideo?: (bytes: number) => void //#endregion } diff --git a/package/src/types/VideoFile.ts b/package/src/types/VideoFile.ts index 6391df76fa..40d90e247b 100644 --- a/package/src/types/VideoFile.ts +++ b/package/src/types/VideoFile.ts @@ -54,3 +54,10 @@ export interface VideoFile extends TemporaryFile { */ height: number } + +export interface OnBytesWrittenVideoEvent { + /** + * The total number of bytes that have been written to the video file so far. + */ + bytesWritten: number +}