diff --git a/Common/Localizable.xcstrings b/Common/Localizable.xcstrings
index 2746ce6f8..11ca078aa 100644
--- a/Common/Localizable.xcstrings
+++ b/Common/Localizable.xcstrings
@@ -60465,6 +60465,9 @@
}
}
}
+ },
+ "Custom WHIP" : {
+
},
"Cycling power device" : {
"localizations" : {
@@ -65053,6 +65056,9 @@
}
}
}
+ },
+ "Disable the WHIP server to change its settings." : {
+
},
"Disabled connections will not be used." : {
"localizations" : {
@@ -70517,6 +70523,9 @@
}
}
}
+ },
+ "Each stream can receive video from one WHIP publisher on the local network." : {
+
},
"Effects" : {
"localizations" : {
@@ -74809,6 +74818,12 @@
}
}
}
+ },
+ "Enter one of the URLs into the WHIP publisher device to send video to this stream. Usually enter the WiFi or Personal Hotspot URL." : {
+
+ },
+ "Enter STUN/TURN URLs, one per line." : {
+
},
"Estimated viewer delay" : {
"localizations" : {
@@ -75401,6 +75416,12 @@
}
}
}
+ },
+ "Example: http://192.168.1.50:8080/live/whip" : {
+
+ },
+ "Example: https://example.com/live/whip" : {
+
},
"Example: rtmp://arn03.contribute.live-video.net/app/live_123321_sdfopjfwjfpawjefpjawef" : {
"localizations" : {
@@ -76881,6 +76902,9 @@
}
}
}
+ },
+ "Example: stun:stun.l.google.com:19302" : {
+
},
"EXB" : {
"localizations" : {
@@ -96094,6 +96118,12 @@
}
}
}
+ },
+ "https://example.com/live/whip" : {
+
+ },
+ "ICE servers" : {
+
},
"Icons to buy" : {
"localizations" : {
@@ -109996,6 +110026,9 @@
}
}
}
+ },
+ "Malformed WHIP URL" : {
+
},
"Manage streams" : {
"localizations" : {
@@ -111618,6 +111651,9 @@
}
}
}
+ },
+ "Max retries" : {
+
},
"Maximum" : {
"localizations" : {
@@ -129819,6 +129855,9 @@
}
}
}
+ },
+ "Note: Custom ICE servers may be ignored depending on WHIP backend." : {
+
},
"NOTE: Only works on Mac as `hevc_videotoolbox` uses Apple’s encoder." : {
"localizations" : {
@@ -137208,7 +137247,7 @@
}
}
},
- "Periodic, audio and video" : {
+ "Periodic audio and video" : {
},
"Permissions" : {
@@ -139726,6 +139765,9 @@
}
}
}
+ },
+ "Please use a valid whip:// URL." : {
+
},
"PNGTuber" : {
"localizations" : {
@@ -196244,6 +196286,9 @@
}
}
}
+ },
+ "Template: https://my_domain/my_endpoint" : {
+
},
"Template: rtmp://[nearby_ingest_endpoint](https://help.twitch.tv/s/twitch-ingest-recommendation)/app/" : {
"localizations" : {
@@ -202750,6 +202795,9 @@
}
}
}
+ },
+ "The TCP port the WHIP server listens for publishers on." : {
+
},
"The UDP port %u will also be used." : {
"localizations" : {
@@ -203934,6 +203982,9 @@
}
}
}
+ },
+ "The WHIP server allows Moblin to receive video streams over the network using WebRTC (WHIP)." : {
+
},
"The zoom (in X) to set when switching to given camera, if enabled." : {
"localizations" : {
@@ -214442,6 +214493,9 @@
}
}
}
+ },
+ "Update Settings → Streams → %@ → Video/Audio." : {
+
},
"Uptime" : {
"localizations" : {
@@ -223903,6 +223957,27 @@
},
"When \"Audio only\" mode is selected, no video will be rendered at all. Only audio will play." : {
+ },
+ "WHEP client" : {
+
+ },
+ "WHEP disconnected" : {
+
+ },
+ "WHIP" : {
+
+ },
+ "WHIP disconnected" : {
+
+ },
+ "WHIP requires H.264 video and Opus audio." : {
+
+ },
+ "WHIP server" : {
+
+ },
+ "WHIP uses HTTP/HTTPS endpoints. (whip:// is also accepted for compatibility.)" : {
+
},
"Whirlpool" : {
"localizations" : {
diff --git a/Common/Various/CommonUtils.swift b/Common/Various/CommonUtils.swift
index edcb14b6d..205404165 100644
--- a/Common/Various/CommonUtils.swift
+++ b/Common/Various/CommonUtils.swift
@@ -498,6 +498,7 @@ private let cameraPositionRtmp = "(RTMP)"
private let cameraPositionSrtla = "(SRT(LA))"
private let cameraPositionRist = "(RIST)"
private let cameraPositionRtsp = "(RTSP)"
+private let cameraPositionWhep = "(WHEP)"
private let cameraPositionMediaPlayer = "(Media player)"
func rtmpCamera(name: String) -> String {
@@ -532,6 +533,14 @@ func isRtspCameraOrMic(camera: String) -> Bool {
return camera.hasSuffix(cameraPositionRtsp)
}
+func whepCamera(name: String) -> String {
+ return "\(name) \(cameraPositionWhep)"
+}
+
+func isWhepCameraOrMic(camera: String) -> Bool {
+ return camera.hasSuffix(cameraPositionWhep)
+}
+
func mediaPlayerCamera(name: String) -> String {
return "\(name) \(cameraPositionMediaPlayer)"
}
diff --git a/Common/Various/Validate.swift b/Common/Various/Validate.swift
index 2da27d9ac..8d74e547a 100644
--- a/Common/Various/Validate.swift
+++ b/Common/Various/Validate.swift
@@ -95,6 +95,10 @@ func isValidUrl(url value: String,
if let message = isValidRtmpUrl(url: value, rtmpStreamKeyRequired: rtmpStreamKeyRequired) {
return message
}
+ case "http", "https", "whip", "whips":
+ // WHIP endpoints are plain HTTP(S) URLs; whip:// and whips:// are also
+ // accepted for compatibility, so there is nothing further to validate here.
+ break
case "srt":
if let message = isValidSrtUrl(url: value) {
return message
diff --git a/Moblin Screen Recording/Moblin Screen Recording.entitlements b/Moblin Screen Recording/Moblin Screen Recording.entitlements
index 4e32970bb..fe1f1756d 100644
--- a/Moblin Screen Recording/Moblin Screen Recording.entitlements
+++ b/Moblin Screen Recording/Moblin Screen Recording.entitlements
@@ -4,7 +4,7 @@
<dict>
<key>com.apple.security.application-groups</key>
<array>
- <string>group.com.eerimoq.Moblin</string>
+ <string>group.io.meetmo.mocam</string>
</array>
</dict>
</plist>
diff --git a/Moblin.xcodeproj/project.pbxproj b/Moblin.xcodeproj/project.pbxproj
index 7bd4745c2..15ab5c56f 100644
--- a/Moblin.xcodeproj/project.pbxproj
+++ b/Moblin.xcodeproj/project.pbxproj
@@ -34,6 +34,8 @@
03ECDF532B8E4E6000BD920E /* Moblin.app in Embed Watch Content */ = {isa = PBXBuildFile; fileRef = 03ECDF462B8E4E5E00BD920E /* Moblin.app */; settings = {ATTRIBUTES = (RemoveHeadersOnCopy, ); }; };
03ECDF5D2B8E5F0B00BD920E /* WrappingHStack in Frameworks */ = {isa = PBXBuildFile; productRef = 03ECDF5C2B8E5F0B00BD920E /* WrappingHStack */; };
03F465EC2C441D1400630708 /* CrcSwift in Frameworks */ = {isa = PBXBuildFile; productRef = 03F465EB2C441D1400630708 /* CrcSwift */; };
+ 08A1B2C3D4E5F67890123459 /* HaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = 08A1B2C3D4E5F67890123457 /* HaishinKit */; };
+ 08A1B2C3D4E5F6789012345A /* RTCHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = 08A1B2C3D4E5F67890123458 /* RTCHaishinKit */; };
882D0C162DF76F5B0035BFAF /* BlackSharkLib in Frameworks */ = {isa = PBXBuildFile; productRef = 882D0C152DF76F5B0035BFAF /* BlackSharkLib */; };
/* End PBXBuildFile section */
@@ -230,6 +232,8 @@
03A08B7C2AC295620018BA95 /* AlertToast in Frameworks */,
0377239C2DE35191007D040D /* VRMSceneKit in Frameworks */,
03BC116B2AE56C2200C38FC4 /* SDWebImageWebPCoder in Frameworks */,
+ 08A1B2C3D4E5F67890123459 /* HaishinKit in Frameworks */,
+ 08A1B2C3D4E5F6789012345A /* RTCHaishinKit in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
@@ -364,6 +368,8 @@
035351932F1C271700428DAC /* AppAuthCore */,
035351952F1C27A500428DAC /* AppAuth */,
0360FD152F228EEB00FF8847 /* MetalPetal */,
+ 08A1B2C3D4E5F67890123457 /* HaishinKit */,
+ 08A1B2C3D4E5F67890123458 /* RTCHaishinKit */,
);
productName = Mobs;
productReference = 035E9E332A9A02D6009D4F5A /* Moblin.app */;
@@ -519,6 +525,7 @@
882D0C142DF76F5B0035BFAF /* XCRemoteSwiftPackageReference "BlackSharkLib" */,
035351922F1C271700428DAC /* XCRemoteSwiftPackageReference "AppAuth-iOS" */,
0360FD142F228EEB00FF8847 /* XCRemoteSwiftPackageReference "MetalPetal" */,
+ 08A1B2C3D4E5F67890123456 /* XCLocalSwiftPackageReference "Vendor/HaishinKit.swift" */,
);
productRefGroup = 035E9E342A9A02D6009D4F5A /* Products */;
projectDirPath = "";
@@ -643,6 +650,7 @@
CODE_SIGN_ENTITLEMENTS = "Moblin Screen Recording/Moblin Screen Recording.entitlements";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
+ DEVELOPMENT_TEAM = 442HHYCXW8;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GENERATE_INFOPLIST_FILE = YES;
@@ -661,7 +669,7 @@
);
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MARKETING_VERSION = 30.2027.0;
- PRODUCT_BUNDLE_IDENTIFIER = "com.eerimoq.Mobs.Moblin-Capture";
+ PRODUCT_BUNDLE_IDENTIFIER = io.webmo.mocam.Capture;
PRODUCT_NAME = "$(TARGET_NAME)";
SKIP_INSTALL = YES;
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
@@ -680,6 +688,7 @@
CODE_SIGN_ENTITLEMENTS = "Moblin Screen Recording/Moblin Screen Recording.entitlements";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
+ DEVELOPMENT_TEAM = 442HHYCXW8;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GENERATE_INFOPLIST_FILE = YES;
@@ -698,7 +707,7 @@
);
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MARKETING_VERSION = 30.2027.0;
- PRODUCT_BUNDLE_IDENTIFIER = "com.eerimoq.Mobs.Moblin-Capture";
+ PRODUCT_BUNDLE_IDENTIFIER = io.webmo.mocam.Capture;
PRODUCT_NAME = "$(TARGET_NAME)";
SKIP_INSTALL = YES;
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
@@ -857,10 +866,12 @@
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_ASSET_PATHS = "";
+ DEVELOPMENT_TEAM = 442HHYCXW8;
ENABLE_PREVIEWS = YES;
GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_FILE = Moblin/Info.plist;
INFOPLIST_KEY_CFBundleDisplayName = Moblin;
+ INFOPLIST_KEY_ITSAppUsesNonExemptEncryption = NO;
INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.video";
INFOPLIST_KEY_NSBluetoothAlwaysUsageDescription = "Moblin communicates with DJI cameras using Bluetooth.";
INFOPLIST_KEY_NSCameraUsageDescription = "Moblin live streams video from the camera.";
@@ -889,7 +900,7 @@
MTLLINKER_FLAGS = "-fcikernel";
MTL_COMPILER_FLAGS = "-fcikernel";
MTL_HEADER_SEARCH_PATHS = "$(HEADER_SEARCH_PATHS)";
- PRODUCT_BUNDLE_IDENTIFIER = com.eerimoq.Mobs;
+ PRODUCT_BUNDLE_IDENTIFIER = io.webmo.mocam;
PRODUCT_NAME = "$(TARGET_NAME)";
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
SUPPORTS_MACCATALYST = NO;
@@ -912,10 +923,12 @@
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_ASSET_PATHS = "";
+ DEVELOPMENT_TEAM = 442HHYCXW8;
ENABLE_PREVIEWS = YES;
GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_FILE = Moblin/Info.plist;
INFOPLIST_KEY_CFBundleDisplayName = Moblin;
+ INFOPLIST_KEY_ITSAppUsesNonExemptEncryption = NO;
INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.video";
INFOPLIST_KEY_NSBluetoothAlwaysUsageDescription = "Moblin communicates with DJI cameras using Bluetooth.";
INFOPLIST_KEY_NSCameraUsageDescription = "Moblin live streams video from the camera.";
@@ -944,7 +957,7 @@
MTLLINKER_FLAGS = "-fcikernel";
MTL_COMPILER_FLAGS = "-fcikernel";
MTL_HEADER_SEARCH_PATHS = "$(HEADER_SEARCH_PATHS)";
- PRODUCT_BUNDLE_IDENTIFIER = com.eerimoq.Mobs;
+ PRODUCT_BUNDLE_IDENTIFIER = io.webmo.mocam;
PRODUCT_NAME = "$(TARGET_NAME)";
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
SUPPORTS_MACCATALYST = NO;
@@ -963,6 +976,7 @@
ASSETCATALOG_COMPILER_WIDGET_BACKGROUND_COLOR_NAME = WidgetBackground;
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
+ DEVELOPMENT_TEAM = 442HHYCXW8;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GENERATE_INFOPLIST_FILE = YES;
@@ -981,7 +995,7 @@
);
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MARKETING_VERSION = 0.1.0;
- PRODUCT_BUNDLE_IDENTIFIER = com.eerimoq.Mobs.Watch.Widget;
+ PRODUCT_BUNDLE_IDENTIFIER = io.webmo.mocam.Watch.Widget;
PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = watchos;
SKIP_INSTALL = YES;
@@ -1000,6 +1014,7 @@
ASSETCATALOG_COMPILER_WIDGET_BACKGROUND_COLOR_NAME = WidgetBackground;
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
+ DEVELOPMENT_TEAM = 442HHYCXW8;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GENERATE_INFOPLIST_FILE = YES;
@@ -1018,7 +1033,7 @@
);
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MARKETING_VERSION = 0.1.0;
- PRODUCT_BUNDLE_IDENTIFIER = com.eerimoq.Mobs.Watch.Widget;
+ PRODUCT_BUNDLE_IDENTIFIER = io.webmo.mocam.Watch.Widget;
PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = watchos;
SKIP_INSTALL = YES;
@@ -1081,6 +1096,7 @@
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_ASSET_PATHS = "\"Moblin Watch/Preview Content\"";
+ DEVELOPMENT_TEAM = 442HHYCXW8;
ENABLE_PREVIEWS = YES;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
@@ -1094,7 +1110,7 @@
INFOPLIST_KEY_NSPhotoLibraryAddUsageDescription = Yes;
INFOPLIST_KEY_NSPhotoLibraryUsageDescription = Yes;
INFOPLIST_KEY_UISupportedInterfaceOrientations = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown";
- INFOPLIST_KEY_WKCompanionAppBundleIdentifier = com.eerimoq.Mobs;
+ INFOPLIST_KEY_WKCompanionAppBundleIdentifier = io.webmo.mocam;
INFOPLIST_KEY_WKRunsIndependentlyOfCompanionApp = NO;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
@@ -1102,7 +1118,7 @@
);
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MARKETING_VERSION = 0.1.0;
- PRODUCT_BUNDLE_IDENTIFIER = com.eerimoq.Mobs.Watch;
+ PRODUCT_BUNDLE_IDENTIFIER = io.webmo.mocam.Watch;
PRODUCT_NAME = Moblin;
SDKROOT = watchos;
SKIP_INSTALL = YES;
@@ -1123,6 +1139,7 @@
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_ASSET_PATHS = "\"Moblin Watch/Preview Content\"";
+ DEVELOPMENT_TEAM = 442HHYCXW8;
ENABLE_PREVIEWS = YES;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
@@ -1136,7 +1153,7 @@
INFOPLIST_KEY_NSPhotoLibraryAddUsageDescription = Yes;
INFOPLIST_KEY_NSPhotoLibraryUsageDescription = Yes;
INFOPLIST_KEY_UISupportedInterfaceOrientations = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown";
- INFOPLIST_KEY_WKCompanionAppBundleIdentifier = com.eerimoq.Mobs;
+ INFOPLIST_KEY_WKCompanionAppBundleIdentifier = io.webmo.mocam;
INFOPLIST_KEY_WKRunsIndependentlyOfCompanionApp = NO;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
@@ -1144,7 +1161,7 @@
);
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MARKETING_VERSION = 0.1.0;
- PRODUCT_BUNDLE_IDENTIFIER = com.eerimoq.Mobs.Watch;
+ PRODUCT_BUNDLE_IDENTIFIER = io.webmo.mocam.Watch;
PRODUCT_NAME = Moblin;
SDKROOT = watchos;
SKIP_INSTALL = YES;
@@ -1214,6 +1231,13 @@
};
/* End XCConfigurationList section */
+/* Begin XCLocalSwiftPackageReference section */
+ 08A1B2C3D4E5F67890123456 /* XCLocalSwiftPackageReference "Vendor/HaishinKit.swift" */ = {
+ isa = XCLocalSwiftPackageReference;
+ relativePath = Vendor/HaishinKit.swift;
+ };
+/* End XCLocalSwiftPackageReference section */
+
/* Begin XCRemoteSwiftPackageReference section */
0318D3682CF51D6900E12F3B /* XCRemoteSwiftPackageReference "swift-protobuf" */ = {
isa = XCRemoteSwiftPackageReference;
@@ -1459,6 +1483,16 @@
package = 03F465EA2C441D1400630708 /* XCRemoteSwiftPackageReference "CrcSwift" */;
productName = CrcSwift;
};
+ 08A1B2C3D4E5F67890123457 /* HaishinKit */ = {
+ isa = XCSwiftPackageProductDependency;
+ package = 08A1B2C3D4E5F67890123456 /* XCLocalSwiftPackageReference "Vendor/HaishinKit.swift" */;
+ productName = HaishinKit;
+ };
+ 08A1B2C3D4E5F67890123458 /* RTCHaishinKit */ = {
+ isa = XCSwiftPackageProductDependency;
+ package = 08A1B2C3D4E5F67890123456 /* XCLocalSwiftPackageReference "Vendor/HaishinKit.swift" */;
+ productName = RTCHaishinKit;
+ };
882D0C152DF76F5B0035BFAF /* BlackSharkLib */ = {
isa = XCSwiftPackageProductDependency;
package = 882D0C142DF76F5B0035BFAF /* XCRemoteSwiftPackageReference "BlackSharkLib" */;
diff --git a/Moblin.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved b/Moblin.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved
index e65b22013..65e1040e2 100644
--- a/Moblin.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved
+++ b/Moblin.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved
@@ -1,5 +1,5 @@
{
- "originHash" : "2ec89183ce973e73157e1f6178f92aabac79182102b81c7f9097c9d2d72f6022",
+ "originHash" : "cd2189c0bc4ce9b8808eccb14601cdb6dff0bacbf693a1ee023eef73c22b2903",
"pins" : [
{
"identity" : "alerttoast",
@@ -55,6 +55,15 @@
"version" : "2.2.0"
}
},
+ {
+ "identity" : "logboard",
+ "kind" : "remoteSourceControl",
+ "location" : "https://github.com/shogo4405/Logboard.git",
+ "state" : {
+ "revision" : "8f41c63afb903040b77049ee2efa8c257b8c0d50",
+ "version" : "2.6.0"
+ }
+ },
{
"identity" : "metalpetal",
"kind" : "remoteSourceControl",
diff --git a/Moblin/Info.plist b/Moblin/Info.plist
index 80dbfc1a7..e53b22c25 100644
--- a/Moblin/Info.plist
+++ b/Moblin/Info.plist
@@ -10,15 +10,13 @@
<key>CFBundleTypeRole</key>
<string>Editor</string>
<key>CFBundleURLName</key>
- <string>com.eerimoq.Mobs</string>
+ <string>io.webmo.mocam</string>
<key>CFBundleURLSchemes</key>
<array>
<string>moblin</string>
</array>
</dict>
</array>
- <key>ITSAppUsesNonExemptEncryption</key>
- <false/>
<key>NSAppTransportSecurity</key>
<dict>
<key>NSAllowsArbitraryLoads</key>
@@ -36,6 +34,23 @@
<string>IntentIntent</string>
<string>MuteIntent</string>
</array>
+ <key>UIApplicationSceneManifest</key>
+ <dict>
+ <key>UIApplicationSupportsMultipleScenes</key>
+ <true/>
+ <key>UISceneConfigurations</key>
+ <dict>
+ <key>UIWindowSceneSessionRoleExternalDisplayNonInteractive</key>
+ <array>
+ <dict>
+ <key>UISceneConfigurationName</key>
+ <string>External Display</string>
+ <key>UISceneDelegateClassName</key>
+ <string>$(PRODUCT_MODULE_NAME).SceneDelegate</string>
+ </dict>
+ </array>
+ </dict>
+ </dict>
<key>UIBackgroundModes</key>
<array>
<string>audio</string>
@@ -44,34 +59,15 @@
</array>
<key>UIFileSharingEnabled</key>
<true/>
- <key>UIApplicationSceneManifest</key>
- <dict>
- <key>UIApplicationSupportsMultipleScenes</key>
- <true/>
- <key>UISceneConfigurations</key>
- <dict>
- <key>UIWindowSceneSessionRoleExternalDisplayNonInteractive</key>
- <array>
- <dict>
- <key>UISceneDelegateClassName</key>
- <string>$(PRODUCT_MODULE_NAME).SceneDelegate</string>
- <key>UISceneConfigurationName</key>
- <string>External Display</string>
- </dict>
- </array>
- </dict>
- </dict>
- <key>CFBundleIdentifier</key>
- <string></string>
- <key>WiFiAwareServices</key>
- <dict>
- <key>_moblin._udp</key>
- <dict>
- <key>Publishable</key>
- <true/>
- <key>Subscribable</key>
- <true/>
- </dict>
- </dict>
+ <key>WiFiAwareServices</key>
+ <dict>
+ <key>_moblin._udp</key>
+ <dict>
+ <key>Publishable</key>
+ <true/>
+ <key>Subscribable</key>
+ <true/>
+ </dict>
+ </dict>
</dict>
</plist>
diff --git a/Moblin/Media/HaishinKit/Codec/Video/VideoDecoder.swift b/Moblin/Media/HaishinKit/Codec/Video/VideoDecoder.swift
index 56abd8bb6..f9ae37466 100644
--- a/Moblin/Media/HaishinKit/Codec/Video/VideoDecoder.swift
+++ b/Moblin/Media/HaishinKit/Codec/Video/VideoDecoder.swift
@@ -11,6 +11,7 @@ class VideoDecoder {
private var formatDescription: CMFormatDescription?
weak var delegate: (any VideoDecoderDelegate)?
private var invalidateSession = true
+ private var consecutiveBadFrames = 0
private var session: VTDecompressionSession? {
didSet {
oldValue?.invalidate()
@@ -30,6 +31,15 @@ class VideoDecoder {
}
}
+ /// Synchronously sets the format description and marks the session for recreation.
+ /// Use when `decodeSampleBuffer` will be called on the same queue immediately after,
+ /// avoiding the race condition with the async `startRunning(formatDescription:)`.
+ func setFormatDescriptionSync(_ formatDescription: CMFormatDescription) {
+ self.formatDescription = formatDescription
+ self.invalidateSession = true
+ self.isRunning = true
+ }
+
func stopRunning() {
lockQueue.async {
self.session = nil
@@ -55,6 +65,15 @@ class VideoDecoder {
}
guard let imageBuffer, status == noErr else {
logger.info("video-decoder: Failed to decode frame status \(status)")
+ // Recover from persistent bad-data errors (e.g., after app state change
+ // disrupts the hardware decoder). Recreate session on next IDR.
+ self.lockQueue.async {
+ self.consecutiveBadFrames += 1
+ if self.consecutiveBadFrames >= 3 {
+ self.invalidateSession = true
+ self.consecutiveBadFrames = 0
+ }
+ }
return
}
guard let formatDescription = CMVideoFormatDescription.create(imageBuffer: imageBuffer) else {
@@ -69,6 +88,7 @@ class VideoDecoder {
return
}
self.lockQueue.async {
+ self.consecutiveBadFrames = 0
self.delegate?.videoDecoderOutputSampleBuffer(self, sampleBuffer)
}
}
diff --git a/Moblin/Media/HaishinKit/WhipStream.swift b/Moblin/Media/HaishinKit/WhipStream.swift
new file mode 100644
index 000000000..39c9e9c57
--- /dev/null
+++ b/Moblin/Media/HaishinKit/WhipStream.swift
@@ -0,0 +1,172 @@
+import AVFoundation
+import Foundation
+
+import HaishinKit
+import RTCHaishinKit
+
+protocol WhipStreamDelegate: AnyObject {
+ func whipStreamOnConnected()
+ func whipStreamOnDisconnected(reason: String)
+}
+
+final class WhipStream: NSObject {
+ private let processor: Processor
+ private weak var delegate: (any WhipStreamDelegate)?
+
+ private var session: (any Session)?
+ private var rtcStream: RTCStream?
+ private var readyStateTask: Task<Void, Never>?
+ private var didReportConnected = false
+
+ init(processor: Processor, delegate: WhipStreamDelegate) {
+ self.processor = processor
+ self.delegate = delegate
+ }
+
+ func start(
+ endpointUrl: URL,
+ settings: SettingsStreamWhip,
+ videoDimensions: CMVideoDimensions
+ ) {
+ Task { [weak self] in
+ guard let self else { return }
+ await self.startInternal(
+ endpointUrl: endpointUrl,
+ settings: settings,
+ videoDimensions: videoDimensions
+ )
+ }
+ }
+
+ func stop() {
+ Task { [weak self] in
+ guard let self else { return }
+ await self.stopInternal()
+ }
+ }
+
+ private func startInternal(
+ endpointUrl: URL,
+ settings: SettingsStreamWhip,
+ videoDimensions: CMVideoDimensions
+ ) async {
+ await stopInternal()
+ didReportConnected = false
+ do {
+ guard let session = try await SessionBuilderFactory.shared
+ .make(endpointUrl)
+ .setMode(.publish)
+ .setConfiguration(nil)
+ .build()
+ else {
+ throw NSError(domain: "Moblin", code: 2, userInfo: [
+ NSLocalizedDescriptionKey: "WHIP session could not be created",
+ ])
+ }
+ self.session = session
+ await session.setMaxRetryCount(settings.maxRetryCount)
+
+ let rtcStream = (await session.stream) as? RTCStream
+ guard let rtcStream else {
+ throw NSError(domain: "Moblin", code: 1, userInfo: [
+ NSLocalizedDescriptionKey: "WHIP session stream is not RTCStream",
+ ])
+ }
+ self.rtcStream = rtcStream
+
+ await rtcStream.setDirection(.sendonly)
+ try await rtcStream.setAudioSettings(.init(channelMap: [0, 0], format: .opus))
+ try await rtcStream.setVideoSettings(.init(videoSize: .init(
+ width: Double(videoDimensions.width),
+ height: Double(videoDimensions.height)
+ )))
+
+ readyStateTask = Task { [weak self] in
+ guard let self else { return }
+ for await state in await session.readyState {
+ switch state {
+ case .open:
+ processorControlQueue.async {
+ self.processor.startEncoding(self)
+ guard !self.didReportConnected else { return }
+ self.didReportConnected = true
+ DispatchQueue.main.async {
+ self.delegate?.whipStreamOnConnected()
+ }
+ }
+ case .closing, .closed:
+ processorControlQueue.async {
+ self.processor.stopEncoding(self)
+ }
+ default:
+ break
+ }
+ }
+ }
+
+ try await session.connect { [weak self] in
+ guard let self else { return }
+ processorControlQueue.async {
+ self.processor.stopEncoding(self)
+ }
+ DispatchQueue.main.async {
+ self.delegate?.whipStreamOnDisconnected(reason: String(localized: "WHIP disconnected"))
+ }
+ }
+ } catch {
+ processorControlQueue.async { [weak self] in
+ guard let self else { return }
+ self.processor.stopEncoding(self)
+ }
+ DispatchQueue.main.async { [weak self] in
+ self?.delegate?.whipStreamOnDisconnected(reason: "WHIP connect failed: \(error)")
+ }
+ await stopInternal()
+ }
+ }
+
+ private func stopInternal() async {
+ readyStateTask?.cancel()
+ readyStateTask = nil
+ didReportConnected = false
+
+ processorControlQueue.async { [weak self] in
+ guard let self else { return }
+ self.processor.stopEncoding(self)
+ }
+
+ do {
+ try await session?.close()
+ } catch {
+ // Best effort close.
+ }
+ self.session = nil
+ self.rtcStream = nil
+ }
+}
+
+extension WhipStream: AudioEncoderDelegate {
+ func audioEncoderOutputFormat(_: AVAudioFormat) {}
+
+ func audioEncoderOutputBuffer(_ buffer: AVAudioCompressedBuffer, _ presentationTimeStamp: CMTime) {
+ guard let rtcStream else { return }
+ let sampleRate = processor.getAudioEncoder().getSampleRate() ?? 48_000
+ let sampleTime = AVAudioFramePosition(presentationTimeStamp.seconds * sampleRate)
+ let when = AVAudioTime(sampleTime: sampleTime, atRate: sampleRate)
+ Task { await rtcStream.append(buffer, when: when) }
+ }
+}
+
+extension WhipStream: VideoEncoderDelegate {
+ func videoEncoderOutputFormat(_: VideoEncoder, _: CMFormatDescription) {}
+
+ func videoEncoderOutputSampleBuffer(
+ _: VideoEncoder,
+ _ sampleBuffer: CMSampleBuffer,
+ _: CMTime
+ ) {
+ guard let rtcStream else { return }
+ Task { await rtcStream.append(sampleBuffer) }
+ }
+}
+
diff --git a/Moblin/Media/WhepClient/WhepClient.swift b/Moblin/Media/WhepClient/WhepClient.swift
new file mode 100644
index 000000000..3d5fc0f1c
--- /dev/null
+++ b/Moblin/Media/WhepClient/WhepClient.swift
@@ -0,0 +1,198 @@
+import AVFoundation
+import CoreMedia
+import Foundation
+
+import HaishinKit
+import RTCHaishinKit
+
+protocol WhepClientDelegate: AnyObject {
+ func whepClientErrorToast(title: String)
+ func whepClientConnected(cameraId: UUID)
+ func whepClientDisconnected(cameraId: UUID, reason: String)
+ func whepClientOnVideoBuffer(cameraId: UUID, _ sampleBuffer: CMSampleBuffer)
+ func whepClientOnAudioBuffer(cameraId: UUID, _ sampleBuffer: CMSampleBuffer)
+}
+
+private final class WhepClientStreamOutput: StreamOutput, @unchecked Sendable {
+ private let cameraId: UUID
+ private weak var delegate: (any WhepClientDelegate)?
+ private let latency: Double // seconds
+ private let lock = NSLock()
+ // Video PTS retiming (RTSP-style): basePts + (framePts - firstFramePts) + latency
+ private var basePts: Double = -1
+ private var firstFramePts: Double = -1
+ private var lastOutputPts: Double = -1
+ // Audio PTS retiming
+ private var audioBasePts: Double = -1
+ private var firstAudioPts: Double = -1
+
+ init(cameraId: UUID, delegate: (any WhepClientDelegate)?, latency: Double) {
+ self.cameraId = cameraId
+ self.delegate = delegate
+ self.latency = latency
+ }
+
+ func stream(_ stream: some StreamConvertible, didOutput audio: AVAudioBuffer, when: AVAudioTime) {
+ guard let audio = audio as? AVAudioPCMBuffer else {
+ return
+ }
+ let audioSeconds = AVAudioTime.seconds(forHostTime: when.hostTime)
+ lock.lock()
+ if audioBasePts < 0 {
+ audioBasePts = currentPresentationTimeStamp().seconds
+ firstAudioPts = audioSeconds
+ }
+ let newPtsSeconds = audioBasePts + (audioSeconds - firstAudioPts) + latency
+ lock.unlock()
+ let pts = CMTime(seconds: newPtsSeconds, preferredTimescale: 1_000_000_000)
+ guard let sampleBuffer = audio.makeSampleBuffer(pts) else {
+ return
+ }
+ delegate?.whepClientOnAudioBuffer(cameraId: cameraId, sampleBuffer)
+ }
+
+ func stream(_ stream: some StreamConvertible, didOutput video: CMSampleBuffer) {
+ let framePts = video.presentationTimeStamp.seconds
+ lock.lock()
+ if basePts < 0 {
+ basePts = currentPresentationTimeStamp().seconds
+ firstFramePts = framePts
+ }
+ var newPtsSeconds = basePts + (framePts - firstFramePts) + latency
+ // Ensure monotonic (never go backwards).
+ if newPtsSeconds <= lastOutputPts {
+ newPtsSeconds = lastOutputPts + 0.001
+ }
+ lastOutputPts = newPtsSeconds
+ lock.unlock()
+ let newPts = CMTime(seconds: newPtsSeconds, preferredTimescale: 1_000_000_000)
+ if let retimed = video.replacePresentationTimeStamp(newPts) {
+ delegate?.whepClientOnVideoBuffer(cameraId: cameraId, retimed)
+ } else {
+ delegate?.whepClientOnVideoBuffer(cameraId: cameraId, video)
+ }
+ }
+}
+
+final class WhepClient: NSObject {
+ private let cameraId: UUID
+ private let url: URL
+ private let latency: Double
+
+ weak var delegate: (any WhepClientDelegate)?
+
+ private var session: (any Session)?
+ private var rtcStream: RTCStream?
+ private var readyStateTask: Task<Void, Never>?
+ private var didReportConnected = false
+
+ init(cameraId: UUID, url: URL, latency: Double) {
+ self.cameraId = cameraId
+ self.url = url
+ self.latency = latency
+ super.init()
+ }
+
+ func start() {
+ Task { [weak self] in
+ guard let self else { return }
+ await self.startInternal()
+ }
+ }
+
+ func stop() {
+ Task { [weak self] in
+ guard let self else { return }
+ await self.stopInternal()
+ }
+ }
+
+ private func startInternal() async {
+ await stopInternal()
+ didReportConnected = false
+
+ do {
+ guard let session = try await SessionBuilderFactory.shared
+ .make(url)
+ .setMode(.playback)
+ .setConfiguration(nil)
+ .build()
+ else {
+ throw NSError(domain: "Moblin", code: 3, userInfo: [
+ NSLocalizedDescriptionKey: "WHEP session could not be created",
+ ])
+ }
+ self.session = session
+
+ let rtcStream = (await session.stream) as? RTCStream
+ guard let rtcStream else {
+ throw NSError(domain: "Moblin", code: 4, userInfo: [
+ NSLocalizedDescriptionKey: "WHEP session stream is not RTCStream",
+ ])
+ }
+ self.rtcStream = rtcStream
+ await rtcStream.setDirection(.recvonly)
+ await rtcStream.addOutput(WhepClientStreamOutput(cameraId: cameraId, delegate: delegate, latency: latency))
+
+ readyStateTask = Task { [weak self] in
+ guard let self else { return }
+ for await state in await session.readyState {
+ switch state {
+ case .open:
+ guard !self.didReportConnected else { break }
+ self.didReportConnected = true
+ DispatchQueue.main.async {
+ self.delegate?.whepClientConnected(cameraId: self.cameraId)
+ }
+ case .closing, .closed:
+ DispatchQueue.main.async {
+ self.delegate?.whepClientDisconnected(
+ cameraId: self.cameraId,
+ reason: String(localized: "WHEP disconnected")
+ )
+ }
+ default:
+ break
+ }
+ }
+ }
+
+ try await session.connect { [weak self] in
+ guard let self else { return }
+ DispatchQueue.main.async {
+ self.delegate?.whepClientDisconnected(
+ cameraId: self.cameraId,
+ reason: String(localized: "WHEP disconnected")
+ )
+ }
+ }
+ } catch {
+ DispatchQueue.main.async { [weak self] in
+ guard let self else { return }
+ self.delegate?.whepClientErrorToast(title: "WHEP connect failed: \(error)")
+ self.delegate?.whepClientDisconnected(cameraId: self.cameraId, reason: "\(error)")
+ }
+ await stopInternal()
+ }
+ }
+
+ private func stopInternal() async {
+ readyStateTask?.cancel()
+ readyStateTask = nil
+ didReportConnected = false
+
+ do {
+ try await session?.close()
+ } catch {
+ // Best effort close.
+ }
+
+ if let rtcStream {
+ await rtcStream.removeAllOutputs()
+ }
+ self.rtcStream = nil
+ self.session = nil
+ }
+}
+
diff --git a/Moblin/Media/WhipServer/WhipServer.swift b/Moblin/Media/WhipServer/WhipServer.swift
new file mode 100644
index 000000000..2a33988af
--- /dev/null
+++ b/Moblin/Media/WhipServer/WhipServer.swift
@@ -0,0 +1,593 @@
+import AVFoundation
+import CoreMedia
+import Foundation
+import HaishinKit
+import Network
+import RTCHaishinKit
+
+let whipServerDispatchQueue = DispatchQueue(label: "com.eerimoq.whip-server")
+
+protocol WhipServerDelegate: AnyObject {
+ func whipServerOnPublishStart(streamKey: String)
+ func whipServerOnPublishStop(streamKey: String, reason: String)
+ func whipServerOnVideoBuffer(cameraId: UUID, _ sampleBuffer: CMSampleBuffer)
+ func whipServerOnAudioBuffer(cameraId: UUID, _ sampleBuffer: CMSampleBuffer)
+}
+
+/// Handles audio from IncomingStream (Opus → PCM via AudioCodec) and retimes PTS.
+/// Video is handled separately by WhipServerVideoDecoder via onCompressedVideo.
+private final class WhipServerAudioOutput: StreamOutput, @unchecked Sendable {
+ private let cameraId: UUID
+ private weak var delegate: (any WhipServerDelegate)?
+ private let latency: Double
+ private let lock = NSLock()
+ private var audioBasePts: Double = -1
+ private var firstAudioPts: Double = -1
+
+ init(cameraId: UUID, delegate: (any WhipServerDelegate)?, latency: Double) {
+ self.cameraId = cameraId
+ self.delegate = delegate
+ self.latency = latency
+ }
+
+ func stream(_ stream: some StreamConvertible, didOutput audio: AVAudioBuffer, when: AVAudioTime) {
+ guard let audio = audio as? AVAudioPCMBuffer else {
+ return
+ }
+ let audioSeconds = AVAudioTime.seconds(forHostTime: when.hostTime)
+ lock.lock()
+ if audioBasePts < 0 {
+ audioBasePts = currentPresentationTimeStamp().seconds
+ firstAudioPts = audioSeconds
+ }
+ let newPtsSeconds = audioBasePts + (audioSeconds - firstAudioPts) + latency
+ lock.unlock()
+ let pts = CMTime(seconds: newPtsSeconds, preferredTimescale: 1_000_000_000)
+ guard let sampleBuffer = audio.makeSampleBuffer(pts) else {
+ return
+ }
+ delegate?.whipServerOnAudioBuffer(cameraId: cameraId, sampleBuffer)
+ }
+
+ func stream(_: some StreamConvertible, didOutput _: CMSampleBuffer) {
+ // Video is handled by WhipServerVideoDecoder, not through RTCStream outputs.
+ }
+}
+
+/// Decodes compressed H264 video from RTCTrack and delivers decoded frames to
+/// BufferedVideo. Matches the RTMP server approach: retime PTS BEFORE decode,
+/// use Moblin's VideoDecoder, direct delivery — no MediaLink or IncomingStream.
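+/// Pipeline: RTCTrack → handleCompressedVideo → retime PTS → VideoDecoder
+/// (VideoToolbox) → videoDecoderOutputSampleBuffer → BufferedVideo.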
+private final class WhipServerVideoDecoder: @unchecked Sendable {
+ private let cameraId: UUID
+ private weak var delegate: (any WhipServerDelegate)?
+ private let latency: Double
+ private let lockQueue = DispatchQueue(label: "com.eerimoq.whip-video-decoder")
+ private let decoder: VideoDecoder
+ private var basePts: Double = -1
+ private var firstFramePts: Double = -1
+ private var lastOutputPts: Double = -1
+ private var currentFormatDescription: CMFormatDescription?
+
+ init(cameraId: UUID, delegate: (any WhipServerDelegate)?, latency: Double) {
+ self.cameraId = cameraId
+ self.delegate = delegate
+ self.latency = latency
+ decoder = VideoDecoder(lockQueue: lockQueue)
+ }
+
+ func start() {
+ decoder.delegate = self
+ }
+
+ func stop() {
+ decoder.stopRunning()
+ }
+
+ /// Called from the RTCTrack callback thread with compressed H264 CMSampleBuffer.
+ func handleCompressedVideo(_ buffer: CMSampleBuffer) {
+ lockQueue.async { [weak self] in
+ self?.handleCompressedVideoInternal(buffer)
+ }
+ }
+
+ private func handleCompressedVideoInternal(_ buffer: CMSampleBuffer) {
+ // Update decoder session when format description changes (new SPS/PPS).
+ // Use setFormatDescriptionSync so the format description is available
+ // IMMEDIATELY for the decodeSampleBuffer call below (same queue).
+ if let fd = buffer.formatDescription, fd != currentFormatDescription {
+ currentFormatDescription = fd
+ decoder.setFormatDescriptionSync(fd)
+ }
+
+ // Retime PTS before decode (matches RTMP server approach).
+ let framePts = buffer.presentationTimeStamp.seconds
+ if basePts < 0 {
+ basePts = currentPresentationTimeStamp().seconds
+ firstFramePts = framePts
+ }
+ var newPtsSeconds = basePts + (framePts - firstFramePts) + latency
+ if newPtsSeconds <= lastOutputPts {
+ newPtsSeconds = lastOutputPts + 0.001
+ }
+ lastOutputPts = newPtsSeconds
+
+ // Discard stale frames whose PTS is already in the past.
+ // This prevents burst playback of accumulated frames after decode errors.
+ let now = currentPresentationTimeStamp().seconds
+ if newPtsSeconds < now - 0.1 {
+ // Frame is more than 100ms in the past — skip it.
+ // Reset base PTS so the next frame starts fresh relative to "now".
+ basePts = -1
+ firstFramePts = -1
+ lastOutputPts = -1
+ return
+ }
+
+ let newPts = CMTime(seconds: newPtsSeconds, preferredTimescale: 1_000_000_000)
+ if let retimed = buffer.replacePresentationTimeStamp(newPts) {
+ decoder.decodeSampleBuffer(retimed)
+ }
+ }
+}
+
+extension WhipServerVideoDecoder: VideoDecoderDelegate {
+ func videoDecoderOutputSampleBuffer(_: VideoDecoder, _ sampleBuffer: CMSampleBuffer) {
+ delegate?.whipServerOnVideoBuffer(cameraId: cameraId, sampleBuffer)
+ }
+}
+
+private final class WhipServerSession: NSObject, RTCPeerConnectionDelegate {
+ let streamKey: String
+ let cameraId: UUID
+ let peerConnection: RTCPeerConnection
+ let stream: RTCStream
+ let videoDecoder: WhipServerVideoDecoder
+ weak var delegate: (any WhipServerDelegate)?
+ private let onTerminated: @Sendable () -> Void
+ private var terminated = false
+ private var didConnect = false
+ private var pendingTerminateWorkItem: DispatchWorkItem?
+ private let localCandidatesLock = NSLock()
+ private var localCandidates: [RTCIceCandidate] = []
+
+ init(
+ streamKey: String,
+ cameraId: UUID,
+ peerConnection: RTCPeerConnection,
+ stream: RTCStream,
+ videoDecoder: WhipServerVideoDecoder,
+ delegate: (any WhipServerDelegate)?,
+ onTerminated: @escaping @Sendable () -> Void
+ ) {
+ self.streamKey = streamKey
+ self.cameraId = cameraId
+ self.peerConnection = peerConnection
+ self.stream = stream
+ self.videoDecoder = videoDecoder
+ self.delegate = delegate
+ self.onTerminated = onTerminated
+ super.init()
+ peerConnection.delegate = self
+ peerConnection.attachIncomingStream(stream)
+ }
+
+ func close(reason: String) {
+ terminate(reason: reason)
+ videoDecoder.stop()
+ peerConnection.close()
+ Task { await stream.close() }
+ }
+
+ func peerConnection(_ peerConnection: RTCPeerConnection, connectionStateChanged connectionState: RTCPeerConnection.ConnectionState) {
+ logger.info("whip-server: \(streamKey) state=\(connectionState)")
+ switch connectionState {
+ case .connected:
+ didConnect = true
+ pendingTerminateWorkItem?.cancel()
+ pendingTerminateWorkItem = nil
+ delegate?.whipServerOnPublishStart(streamKey: streamKey)
+ case .closed, .failed, .disconnected:
+ // Some WHIP clients (e.g. ffmpeg) send 0 candidates in the initial offer and then trickle via PATCH.
+ // libdatachannel may temporarily report a failed/disconnected state before remote candidates arrive.
+ // Give it a short grace period before tearing down the session.
+ if didConnect {
+ terminate(reason: "\(connectionState)")
+ } else {
+ pendingTerminateWorkItem?.cancel()
+ let work = DispatchWorkItem { [weak self] in
+ guard let self else { return }
+ self.terminate(reason: "\(connectionState)")
+ }
+ pendingTerminateWorkItem = work
+ whipServerDispatchQueue.asyncAfter(deadline: .now() + 3.0, execute: work)
+ }
+ default:
+ break
+ }
+ }
+
+ func peerConnection(_ peerConnection: RTCPeerConnection, iceGatheringStateChanged iceGatheringState: RTCPeerConnection.IceGatheringState) {}
+ func peerConnection(_ peerConnection: RTCPeerConnection, iceConnectionStateChanged iceConnectionState: RTCPeerConnection.IceConnectionState) {}
+ func peerConnection(_ peerConnection: RTCPeerConnection, signalingStateChanged signalingState: RTCPeerConnection.SignalingState) {}
+ func peerConnection(_ peerConnection: RTCPeerConnection, didOpen dataChannel: RTCDataChannel) {}
+ func peerConnection(_ peerConnection: RTCPeerConnection, gotIceCandidate candidate: RTCIceCandidate) {
+ let line = candidate.candidate.trimmingCharacters(in: .whitespacesAndNewlines)
+ guard !line.isEmpty else {
+ return
+ }
+ let lower = line.lowercased()
+ if lower.contains(" tcp ") || lower.contains(" fe80:") {
+ return
+ }
+ logger.info("whip-server: \(streamKey) local-candidate mid=\(candidated.mid) \(line)")
+ localCandidatesLock.lock()
+ localCandidates.append(candidate)
+ localCandidatesLock.unlock()
+ }
+
+ func getLocalCandidates() -> [RTCIceCandidate] {
+ localCandidatesLock.lock()
+ defer { localCandidatesLock.unlock() }
+ return localCandidates
+ }
+
+ private func terminate(reason: String) {
+ guard !terminated else {
+ return
+ }
+ terminated = true
+ delegate?.whipServerOnPublishStop(streamKey: streamKey, reason: reason)
+ onTerminated()
+ }
+}
+
+final class WhipServer {
+ weak var delegate: (any WhipServerDelegate)?
+ var settings: SettingsWhipServer
+
+ private let httpServer: HttpServer
+ private var sessionsByStreamKey: [String: WhipServerSession] = [:]
+
+ init(settings: SettingsWhipServer) {
+ self.settings = settings
+ httpServer = HttpServer(queue: whipServerDispatchQueue, routes: [])
+ rebuildRoutes()
+ }
+
+ func start() {
+ rebuildRoutes()
+ httpServer.start(port: NWEndpoint.Port(rawValue: settings.port) ?? .http)
+ }
+
+ func stop() {
+ whipServerDispatchQueue.async {
+ self.httpServer.stop()
+ for (_, session) in self.sessionsByStreamKey {
+ session.close(reason: "Server stop")
+ }
+ self.sessionsByStreamKey.removeAll()
+ }
+ }
+
+ func isStreamConnected(streamKey: String) -> Bool {
+ whipServerDispatchQueue.sync {
+ sessionsByStreamKey[streamKey] != nil
+ }
+ }
+
+ private func rebuildRoutes() {
+ var routes: [HttpServerRoute] = []
+ for stream in settings.streams {
+ let path = "/whip/\(stream.streamKey)"
+ routes.append(HttpServerRoute(path: path) { [weak self] request, response in
+ self?.handleRequest(stream: stream, request: request, response: response)
+ })
+ }
+ httpServer.setRoutes(routes)
+ }
+
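+ // WHIP maps the publish session lifecycle onto plain HTTP (draft-ietf-wish-whip):
+ // POST with an application/sdp offer creates the session and returns the SDP
+ // answer (201 Created plus a Location header), PATCH with
+ // application/trickle-ice-sdpfrag trickles ICE candidates, and DELETE tears
+ // the session down.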
+ private func handleRequest(stream: SettingsWhipServerStream, request: HttpServerRequest, response: HttpServerResponse) {
+ switch request.method.uppercased() {
+ case "POST":
+ handlePost(stream: stream, request: request, response: response)
+ case "PATCH":
+ handlePatch(stream: stream, request: request, response: response)
+ case "DELETE":
+ handleDelete(stream: stream, response: response)
+ default:
+ response.send(text: "", status: .methodNotAllowed)
+ }
+ }
+
+ private func handleDelete(stream: SettingsWhipServerStream, response: HttpServerResponse) {
+ whipServerDispatchQueue.async {
+ if let session = self.sessionsByStreamKey[stream.streamKey] {
+ session.close(reason: "Client delete")
+ self.sessionsByStreamKey[stream.streamKey] = nil
+ }
+ response.send(text: "", status: .ok)
+ }
+ }
+
+ private func handlePatch(stream: SettingsWhipServerStream, request: HttpServerRequest, response: HttpServerResponse) {
+ // Trickle ICE: application/trickle-ice-sdpfrag (RFC 8840)
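+ // A typical fragment body (illustrative values) looks like:
+ //
+ //   a=ice-ufrag:EsAw
+ //   a=ice-pwd:P2uYro0UCOQ4zxjKXaWCBui1
+ //   a=mid:0
+ //   a=candidate:1 1 UDP 2130706431 192.168.1.50 53533 typ host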
+ whipServerDispatchQueue.async {
+ guard let session = self.sessionsByStreamKey[stream.streamKey] else {
+ response.send(text: "", status: .notFound)
+ return
+ }
+ guard let frag = String(data: request.body, encoding: .utf8), !frag.isEmpty else {
+ response.send(text: "", status: .badRequest)
+ return
+ }
+ do {
+ let (candidates, mid) = Self.parseTrickleIceSdpFrag(frag)
+ logger.info("whip-server: patch streamKey=\(stream.streamKey) mid=\(mid ?? "-") candidates=\(candidates.count)")
+ for candidate in candidates {
+ try session.peerConnection.addRemoteCandidate(candidate, mid: mid)
+ }
+ response.send(text: "", status: .noContent)
+ } catch {
+ logger.info("whip-server: patch error: \(error)")
+ response.send(text: "", status: .badRequest)
+ }
+ }
+ }
+
+ private func handlePost(stream: SettingsWhipServerStream, request: HttpServerRequest, response: HttpServerResponse) {
+ if let contentType = request.header("content-type"), !contentType.hasPrefix("application/sdp") {
+ response.send(text: "", status: .unsupportedMediaType)
+ return
+ }
+ guard let offer = String(data: request.body, encoding: .utf8), !offer.isEmpty else {
+ response.send(text: "", status: .badRequest)
+ return
+ }
+ whipServerDispatchQueue.async {
+ if let existing = self.sessionsByStreamKey[stream.streamKey] {
+ existing.close(reason: "Replaced by new publisher")
+ self.sessionsByStreamKey[stream.streamKey] = nil
+ }
+ Task {
+ do {
+ let (sanitizedOffer, removedCandidates) = Self.sanitizeOfferSdp(offer)
+ logger.info(
+ "whip-server: received offer for \(stream.streamKey) (\(offer.count) bytes), " +
+ "candidates=\(Self.countCandidates(offer)), removedCandidates=\(removedCandidates)"
+ )
+ let latency = min(Double(stream.latency) / 1000.0, 0.5)
+
+ // --- Video path (RTMP-style): compressed RTP → retime PTS → VideoDecoder → BufferedVideo ---
+ let videoDecoder = WhipServerVideoDecoder(
+ cameraId: stream.id,
+ delegate: self.delegate,
+ latency: latency
+ )
+ videoDecoder.start()
+
+ // --- Audio path: RTCTrack → IncomingStream/AudioCodec (Opus→PCM) → WhipServerAudioOutput ---
+ let rtcStream = RTCStream()
+ await rtcStream.setDirection(.recvonly)
+ await rtcStream.addOutput(WhipServerAudioOutput(
+ cameraId: stream.id,
+ delegate: self.delegate,
+ latency: latency
+ ))
+
+ let peerConnection = try RTCPeerConnection(RTCConfiguration())
+ // Set onCompressedVideo BEFORE setRemoteDescription so that when
+ // libdatachannel fires the track callback, video tracks are routed
+ // to our VideoDecoder. Audio tracks go to incomingStream (RTCStream).
+ peerConnection.onCompressedVideo = { [weak videoDecoder] buffer in
+ videoDecoder?.handleCompressedVideo(buffer)
+ }
+ peerConnection.attachIncomingStream(rtcStream)
+ let session = WhipServerSession(
+ streamKey: stream.streamKey,
+ cameraId: stream.id,
+ peerConnection: peerConnection,
+ stream: rtcStream,
+ videoDecoder: videoDecoder,
+ delegate: self.delegate,
+ onTerminated: { [weak self] in
+ whipServerDispatchQueue.async {
+ self?.sessionsByStreamKey[stream.streamKey] = nil
+ }
+ }
+ )
+
+ try peerConnection.setRemoteDesciption(sanitizedOffer, type: .offer)
+ let answer = await self.waitForLocalDescription(peerConnection: peerConnection, timeoutSeconds: 2.0)
+ await self.waitForIceGatheringComplete(peerConnection: peerConnection, timeoutSeconds: 5.0)
+
+ // libdatachannel may not embed gathered candidates into the local SDP string
+ // even when gathering is complete (trickle-only behavior). WHIP endpoints must
+ // include their ICE candidates in the SDP answer, so we inject candidates that
+ // arrive via the local-candidate callback.
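+ // For example, an injected host candidate ends up in its media section as
+ // "a=candidate:1 1 UDP 2130706431 192.168.1.7 50000 typ host" (illustrative
+ // address/port), terminated by "a=end-of-candidates".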
+ let baseAnswer = peerConnection.localDescriptionSdp.isEmpty ? answer : peerConnection.localDescriptionSdp
+ let injected = Self.injectCandidatesIntoAnswerSdp(
+ baseAnswer,
+ candidates: session.getLocalCandidates()
+ )
+ let finalAnswer = injected
+ guard !finalAnswer.isEmpty else {
+ throw RTCError.notAvail
+ }
+ logger.info(
+ "whip-server: generated answer for \(stream.streamKey) (\(finalAnswer.count) bytes), " +
+ "iceGathering=\(peerConnection.iceGatheringState), candidates=\(Self.countCandidates(finalAnswer))"
+ )
+
+ whipServerDispatchQueue.async {
+ self.sessionsByStreamKey[stream.streamKey] = session
+ let path = "/whip/\(stream.streamKey)"
+ let location: String
+ if let host = request.header("host"), !host.isEmpty {
+ location = "http://\(host)\(path)"
+ } else {
+ location = path
+ }
+ response.send(
+ text: finalAnswer,
+ status: .created,
+ contentType: "application/sdp",
+ headers: [("Location", location)]
+ )
+ }
+ } catch {
+ logger.info("whip-server: \(error)")
+ response.send(text: "", status: .internalServerError)
+ }
+ }
+ }
+ }
+
+ private func waitForIceGatheringComplete(peerConnection: RTCPeerConnection, timeoutSeconds: Double) async {
+ let deadline = Date().addingTimeInterval(timeoutSeconds)
+ while peerConnection.iceGatheringState != .complete && Date() < deadline {
+ try? await Task.sleep(for: .milliseconds(50))
+ }
+ }
+
+ private func waitForLocalDescription(peerConnection: RTCPeerConnection, timeoutSeconds: Double) async -> String {
+ let deadline = Date().addingTimeInterval(timeoutSeconds)
+ while peerConnection.localDescriptionSdp.isEmpty && Date() < deadline {
+ try? await Task.sleep(for: .milliseconds(25))
+ }
+ return peerConnection.localDescriptionSdp
+ }
+
+ private static func parseTrickleIceSdpFrag(_ frag: String) -> (candidates: [String], mid: String?) {
+ var candidates: [String] = []
+ var mid: String?
+ for rawLine in frag.split(separator: "\n", omittingEmptySubsequences: false) {
+ let line = rawLine.trimmingCharacters(in: .whitespacesAndNewlines)
+ if line.hasPrefix("a=mid:") {
+ mid = String(line.dropFirst("a=mid:".count))
+ } else if line.hasPrefix("a=candidate:") || line.hasPrefix("candidate:") {
+ // Heuristic: libdatachannel often can't send to IPv6 link-local candidates (missing scope),
+ // and TCP candidates are not useful for our LAN ingest use-case.
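+ // For example (illustrative lines), "a=candidate:2 1 TCP 2105458942 192.0.2.4 9
+ // typ host tcptype active" and "a=candidate:3 1 UDP 2122317823 fe80::abcd 61000
+ // typ host" are skipped; plain UDP host/srflx candidates pass through.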
+ let lower = line.lowercased()
+ if lower.contains(" tcp ") || lower.contains(" fe80:") {
+ continue
+ }
+ candidates.append(line)
+ }
+ }
+ return (candidates, mid)
+ }
+
+ private static func countCandidates(_ sdp: String) -> Int {
+ return sdp.split(separator: "\n").filter {
+ let line = $0.trimmingCharacters(in: .whitespacesAndNewlines)
+ return line.hasPrefix("a=candidate:") || line.hasPrefix("candidate:")
+ }.count
+ }
+
+ private static func sanitizeOfferSdp(_ offer: String) -> (sdp: String, removedCandidates: Int) {
+ var removed = 0
+ let lines = offer.split(separator: "\n", omittingEmptySubsequences: false).map(String.init)
+ let kept: [String] = lines.filter { line in
+ let trimmed = line.trimmingCharacters(in: .whitespacesAndNewlines)
+ guard trimmed.hasPrefix("a=candidate:") else {
+ return true
+ }
+ let lower = trimmed.lowercased()
+ if lower.contains(" tcp ") || lower.contains(" fe80:") {
+ removed += 1
+ return false
+ }
+ return true
+ }
+ return (kept.joined(separator: "\n"), removed)
+ }
+
+ private static func injectCandidatesIntoAnswerSdp(_ sdp: String, candidates: [RTCIceCandidate]) -> String {
+ guard !sdp.isEmpty, !candidates.isEmpty else {
+ return sdp
+ }
+ // If SDP already contains candidates, keep it (avoid duplicates).
+ if countCandidates(sdp) > 0 {
+ return sdp
+ }
+
+ var lines = sdp.split(separator: "\n", omittingEmptySubsequences: false).map(String.init)
+ let mediaStarts = lines.indices.filter { lines[$0].trimmingCharacters(in: .whitespacesAndNewlines).hasPrefix("m=") }
+ guard let firstMediaStart = mediaStarts.first else {
+ // No media sections; just append at end best-effort.
+ var appended = lines
+ for c in candidates {
+ let l = normalizeCandidateLine(c.candidate)
+ if !l.isEmpty { appended.append(l) }
+ }
+ appended.append("a=end-of-candidates")
+ return appended.joined(separator: "\n")
+ }
+
+ // Map mid -> insertion section (start index of that m= section).
+ var sectionByMid: [String: Int] = [:]
+ for i in 0..<lines.count {
+ let trimmed = lines[i].trimmingCharacters(in: .whitespacesAndNewlines)
+ guard trimmed.hasPrefix("a=mid:") else { continue }
+ let mid = String(trimmed.dropFirst("a=mid:".count))
+ if let sectionStart = mediaStarts.last(where: { $0 < i }) {
+ sectionByMid[mid] = sectionStart
+ }
+ }
+
+ // Group normalized candidate lines by the m= section their mid maps to.
+ var candidatesBySection: [Int: [String]] = [:]
+ for candidate in candidates {
+ let line = normalizeCandidateLine(candidate.candidate)
+ guard !line.isEmpty else { continue }
+ let sectionStart = sectionByMid[candidate.mid] ?? firstMediaStart
+ candidatesBySection[sectionStart, default: []].append(line)
+ }
+
+ // Insert into later sections first so earlier insertion indices stay valid.
+ let sortedSections = candidatesBySection.keys.sorted(by: >)
+ for sectionStart in sortedSections {
+ guard let insertLines = candidatesBySection[sectionStart], !insertLines.isEmpty else { continue }
+
+ let sectionIndex = mediaStarts.firstIndex(of: sectionStart) ?? 0
+ let sectionEnd = (sectionIndex + 1 < mediaStarts.count) ? mediaStarts[sectionIndex + 1] : lines.count
+
+ // Insert near end of section, before next m=.
+ var insertAt = sectionEnd
+ // Keep end-of-candidates inside section.
+ let alreadyHasEnd = lines[sectionStart..<sectionEnd].contains {
+ $0.trimmingCharacters(in: .whitespacesAndNewlines) == "a=end-of-candidates"
+ }
+ if alreadyHasEnd {
+ // Place new candidates just before the existing end-of-candidates marker.
+ if let idx = (sectionStart..<sectionEnd).last(where: {
+ $0 >= 0 && lines[$0].trimmingCharacters(in: .whitespacesAndNewlines) == "a=end-of-candidates"
+ }) {
+ insertAt = idx
+ }
+ }
+
+ lines.insert(contentsOf: insertLines, at: insertAt)
+ if !alreadyHasEnd {
+ lines.insert("a=end-of-candidates", at: insertAt + insertLines.count)
+ }
+ }
+
+ return lines.joined(separator: "\n")
+ }
+
+ private static func normalizeCandidateLine(_ raw: String) -> String {
+ let trimmed = raw.trimmingCharacters(in: .whitespacesAndNewlines)
+ if trimmed.isEmpty { return "" }
+ if trimmed.hasPrefix("a=candidate:") { return trimmed }
+ if trimmed.hasPrefix("candidate:") { return "a=\(trimmed)" }
+ if trimmed.contains("candidate:") {
+ // Best effort: ensure it's an SDP attribute.
+ return trimmed.hasPrefix("a=") ? trimmed : "a=\(trimmed)"
+ }
+ return ""
+ }
+}
+
diff --git a/Moblin/Moblin.entitlements b/Moblin/Moblin.entitlements
index 8ed677a12..ccb07d2f4 100644
--- a/Moblin/Moblin.entitlements
+++ b/Moblin/Moblin.entitlements
@@ -2,26 +2,26 @@
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
- <key>com.apple.developer.healthkit</key>
- <true/>
- <key>com.apple.developer.healthkit.access</key>
- <array/>
- <key>com.apple.developer.networking.wifi-info</key>
- <true/>
- <key>com.apple.developer.siri</key>
- <true/>
- <key>com.apple.developer.weatherkit</key>
- <true/>
- <key>com.apple.developer.wifi-aware</key>
- <array>
- <string>Subscribe</string>
- <string>Publish</string>
- </array>
- <key>com.apple.external-accessory.wireless-configuration</key>
- <true/>
- <key>com.apple.security.application-groups</key>
- <array>
- <string>group.com.eerimoq.Moblin</string>
- </array>
+ <key>com.apple.developer.healthkit</key>
+ <true/>
+ <key>com.apple.developer.healthkit.access</key>
+ <array/>
+ <key>com.apple.developer.networking.wifi-info</key>
+ <true/>
+ <key>com.apple.developer.siri</key>
+ <true/>
+ <key>com.apple.developer.weatherkit</key>
+ <true/>
+ <key>com.apple.developer.wifi-aware</key>
+ <array>
+ <string>Subscribe</string>
+ <string>Publish</string>
+ </array>
+ <key>com.apple.external-accessory.wireless-configuration</key>
+ <true/>
+ <key>com.apple.security.application-groups</key>
+ <array>
+ <string>group.io.wemo.mocamapp</string>
+ </array>
</dict>
</plist>
diff --git a/Moblin/MoblinApp.swift b/Moblin/MoblinApp.swift
index 88edddad2..896dde14a 100644
--- a/Moblin/MoblinApp.swift
+++ b/Moblin/MoblinApp.swift
@@ -1,3 +1,5 @@
+import HaishinKit
+import RTCHaishinKit
import SwiftUI
@main
@@ -9,6 +11,9 @@ struct MoblinApp: App {
init() {
MoblinApp.globalModel = Model()
_model = StateObject(wrappedValue: MoblinApp.globalModel!)
+ Task {
+ await SessionBuilderFactory.shared.register(HTTPSessionFactory())
+ }
}
var body: some Scene {
diff --git a/Moblin/Various/Media.swift b/Moblin/Various/Media.swift
index 756d0f37a..2b83e2c7b 100644
--- a/Moblin/Various/Media.swift
+++ b/Moblin/Various/Media.swift
@@ -23,6 +23,8 @@ protocol MediaDelegate: AnyObject {
func mediaOnRtmpDestinationDisconnected(_ destination: String)
func mediaOnRistConnected()
func mediaOnRistDisconnected()
+ func mediaOnWhipConnected()
+ func mediaOnWhipDisconnected(_ reason: String)
func mediaOnAudioMuteChange()
func mediaOnAudioBuffer(_ sampleBuffer: CMSampleBuffer)
func mediaOnLowFpsImage(_ lowFpsImage: Data?, _ frameNumber: UInt64)
@@ -53,6 +55,7 @@ final class Media: NSObject {
private var srtStreamNew: SrtStreamMoblin?
private var srtStreamOld: SrtStreamOfficial?
private var ristStream: RistStream?
+ private var whipStream: WhipStream?
private var srtlaClient: SrtlaClient?
private var processor: Processor?
private var srtTotalByteCount: Int64 = 0
@@ -101,10 +104,12 @@ final class Media: NSObject {
srtStopStream()
rtmpStopStream()
ristStopStream()
+ whipStopStream()
rtmpStreams.removeAll()
srtStreamNew = nil
srtStreamOld = nil
ristStream = nil
+ whipStream = nil
processor = nil
}
@@ -120,6 +125,7 @@ final class Media: NSObject {
srtStopStream()
rtmpStopStream()
ristStopStream()
+ whipStopStream()
let processor = Processor()
switch proto {
case .rtmp:
@@ -138,6 +144,7 @@ final class Media: NSObject {
srtStreamNew = nil
srtStreamOld = nil
ristStream = nil
+ whipStream = nil
case .srt:
switch srtImplementation {
case .moblin:
@@ -157,11 +164,19 @@ final class Media: NSObject {
}
rtmpStreams.removeAll()
ristStream = nil
+ whipStream = nil
case .rist:
ristStream = RistStream(processor: processor, timecodesEnabled: timecodesEnabled, delegate: self)
srtStreamNew = nil
srtStreamOld = nil
rtmpStreams.removeAll()
+ whipStream = nil
+ case .whip:
+ whipStream = WhipStream(processor: processor, delegate: self)
+ srtStreamNew = nil
+ srtStreamOld = nil
+ rtmpStreams.removeAll()
+ ristStream = nil
}
self.processor = processor
processor.setDelegate(delegate: self)
@@ -607,6 +622,14 @@ final class Media: NSObject {
ristStream?.stop()
}
+ func whipStartStream(endpointUrl: URL, settings: SettingsStreamWhip, videoDimensions: CMVideoDimensions) {
+ whipStream?.start(endpointUrl: endpointUrl, settings: settings, videoDimensions: videoDimensions)
+ }
+
+ func whipStopStream() {
+ whipStream?.stop()
+ }
+
func setTorch(on: Bool) {
processor?.setTorch(value: on)
}
@@ -1129,6 +1152,16 @@ extension Media: RistStreamDelegate {
}
}
+extension Media: WhipStreamDelegate {
+ func whipStreamOnConnected() {
+ delegate?.mediaOnWhipConnected()
+ }
+
+ func whipStreamOnDisconnected(reason: String) {
+ delegate?.mediaOnWhipDisconnected(reason)
+ }
+}
+
extension Media: SrtStreamMoblinDelegate {
func srtStreamMoblinConnected() {
DispatchQueue.main.async {
diff --git a/Moblin/Various/Model/Model.swift b/Moblin/Various/Model/Model.swift
index 9c5a983f9..377d7956b 100644
--- a/Moblin/Various/Model/Model.swift
+++ b/Moblin/Various/Model/Model.swift
@@ -146,9 +146,11 @@ class Raid: ObservableObject {
class Ingests: ObservableObject {
var rtmp: RtmpServer?
+ var whip: WhipServer?
var srtla: SrtlaServer?
var rist: RistServer?
var rtsp: [RtspClient] = []
+ var whep: [WhepClient] = []
@Published var speedAndTotal = noValue
}
@@ -1062,9 +1064,11 @@ final class Model: NSObject, ObservableObject, @unchecked Sendable {
object: nil)
updateOrientation()
reloadRtmpServer()
+ reloadWhipServer()
reloadSrtlaServer()
reloadRistServer()
reloadRtspClient()
+ reloadWhepClient()
ipMonitor.pathUpdateHandler = handleIpStatusUpdate
ipMonitor.start()
NotificationCenter.default.addObserver(self,
@@ -1408,6 +1412,7 @@ final class Model: NSObject, ObservableObject, @unchecked Sendable {
reloadSrtlaServer()
reloadRistServer()
reloadRtspClient()
+ reloadWhepClient()
chatTextToSpeech.reset(running: true)
startWeatherManager()
startGeographyManager()
diff --git a/Moblin/Various/Model/ModelCamera.swift b/Moblin/Various/Model/ModelCamera.swift
index 6bc5792f4..f10b38b1d 100644
--- a/Moblin/Various/Model/ModelCamera.swift
+++ b/Moblin/Various/Model/ModelCamera.swift
@@ -560,6 +560,9 @@ extension Model {
cameras += rtmpCameras().map {
($0.0.uuidString, $0.1)
}
+ cameras += whipCameras().map {
+ ($0.0.uuidString, $0.1)
+ }
cameras += srtlaCameras().map {
($0.0.uuidString, $0.1)
}
@@ -569,6 +572,9 @@ extension Model {
cameras += rtspCameras().map {
($0.0.uuidString, $0.1)
}
+ cameras += whepCameras().map {
+ ($0.0.uuidString, $0.1)
+ }
cameras += playerCameras().map {
($0.0.uuidString, $0.1)
}
@@ -618,10 +624,14 @@ extension Model {
return .srtla(id: id)
} else if let id = getRtmpStream(idString: cameraId)?.id {
return .rtmp(id: id)
+ } else if let id = getWhipStream(idString: cameraId)?.id {
+ return .whip(id: id)
} else if let id = getRistStream(idString: cameraId)?.id {
return .rist(id: id)
} else if let id = getRtspStream(idString: cameraId)?.id {
return .rtsp(id: id)
+ } else if let id = getWhepStream(idString: cameraId)?.id {
+ return .whep(id: id)
} else if let id = getMediaPlayer(idString: cameraId)?.id {
return .mediaPlayer(id: id)
} else if isBackCamera(cameraId: cameraId) {
@@ -650,12 +660,16 @@ extension Model {
switch settingsCameraId {
case let .rtmp(id):
return id.uuidString
+ case let .whip(id):
+ return id.uuidString
case let .srtla(id):
return id.uuidString
case let .rist(id: id):
return id.uuidString
case let .rtsp(id: id):
return id.uuidString
+ case let .whep(id: id):
+ return id.uuidString
case let .mediaPlayer(id):
return id.uuidString
case let .external(id, _):
@@ -700,12 +714,16 @@ extension Model {
switch settingsCameraId {
case let .rtmp(id):
return getRtmpStream(id: id)?.camera() ?? unknownSad
+ case let .whip(id):
+ return getWhipStream(id: id)?.camera() ?? unknownSad
case let .srtla(id):
return getSrtlaStream(id: id)?.camera() ?? unknownSad
case let .rist(id):
return getRistStream(id: id)?.camera() ?? unknownSad
case let .rtsp(id):
return getRtspStream(id: id)?.camera() ?? unknownSad
+ case let .whep(id):
+ return getWhepStream(id: id)?.camera() ?? unknownSad
case let .mediaPlayer(id):
return getMediaPlayer(id: id)?.camera() ?? unknownSad
case let .external(_, name):
@@ -809,12 +827,16 @@ extension Model {
switch cameraId {
case let .rtmp(id: id):
return id
+ case let .whip(id: id):
+ return id
case let .srtla(id: id):
return id
case let .rist(id: id):
return id
case let .rtsp(id: id):
return id
+ case let .whep(id: id):
+ return id
case let .mediaPlayer(id: id):
return id
case .screenCapture:
diff --git a/Moblin/Various/Model/ModelScene.swift b/Moblin/Various/Model/ModelScene.swift
index 963d9d14e..f00f04b09 100644
--- a/Moblin/Various/Model/ModelScene.swift
+++ b/Moblin/Various/Model/ModelScene.swift
@@ -38,7 +38,7 @@ struct WidgetInScene: Identifiable {
extension Model {
func getTextEffects(id: UUID) -> [TextEffect] {
var effects: [TextEffect] = []
- if let effect = textEffects[id] {
+ if let effect = textEffects.first(where: { $0.key == id })?.value {
effects.append(effect)
}
for slideshow in slideshowEffects.values {
@@ -52,39 +52,39 @@ extension Model {
}
func getVideoSourceEffect(id: UUID) -> VideoSourceEffect? {
- return videoSourceEffects[id]
+ return videoSourceEffects.first(where: { $0.key == id })?.value
}
func getVTuberEffect(id: UUID) -> VTuberEffect? {
- return vTuberEffects[id]
+ return vTuberEffects.first(where: { $0.key == id })?.value
}
func getPngTuberEffect(id: UUID) -> PngTuberEffect? {
- return pngTuberEffects[id]
+ return pngTuberEffects.first(where: { $0.key == id })?.value
}
func getSnapshotEffect(id: UUID) -> SnapshotEffect? {
- return snapshotEffects[id]
+ return snapshotEffects.first(where: { $0.key == id })?.value
}
func getChatEffect(id: UUID) -> ChatEffect? {
- return chatEffects[id]
+ return chatEffects.first(where: { $0.key == id })?.value
}
func getQrCodeEffect(id: UUID) -> QrCodeEffect? {
- return qrCodeEffects[id]
+ return qrCodeEffects.first(where: { $0.key == id })?.value
}
func getWheelOfLuckEffect(id: UUID) -> WheelOfLuckEffect? {
- return wheelOfLuckEffects[id]
+ return wheelOfLuckEffects.first(where: { $0.key == id })?.value
}
func getBingoCardEffect(id: UUID) -> BingoCardEffect? {
- return bingoCardEffects[id]
+ return bingoCardEffects.first(where: { $0.key == id })?.value
}
func getScoreboardEffect(id: UUID) -> ScoreboardEffect? {
- return scoreboardEffects[id]
+ return scoreboardEffects.first(where: { $0.key == id })?.value
}
func getWidgetShapeEffect(_ widget: SettingsWidget, _ effect: SettingsVideoEffect) -> ShapeEffect? {
@@ -249,17 +249,28 @@ extension Model {
streamOverlay.isFrontCameraSelected = true
case .rtmp:
attachBufferedCamera(cameraId: scene.videoSource.rtmpCameraId, scene: scene)
+ case .whip:
+ attachBufferedCamera(cameraId: scene.videoSource.whipCameraId, scene: scene)
case .srtla:
attachBufferedCamera(cameraId: scene.videoSource.srtlaCameraId, scene: scene)
case .rist:
attachBufferedCamera(cameraId: scene.videoSource.ristCameraId, scene: scene)
case .rtsp:
attachBufferedCamera(cameraId: scene.videoSource.rtspCameraId, scene: scene)
+ case .whep:
+ attachBufferedCamera(cameraId: scene.videoSource.whepCameraId, scene: scene)
case .mediaPlayer:
mediaPlayers[scene.videoSource.mediaPlayerCameraId]?.activate()
attachBufferedCamera(cameraId: scene.videoSource.mediaPlayerCameraId, scene: scene)
case .external:
- attachExternalCamera(scene: scene)
+ // Backward-compat: WHIP/WHEP used to be stored as "external" camera IDs (uuidString).
+ if let id = UUID(uuidString: scene.videoSource.externalCameraId),
+ getWhipStream(id: id) != nil || getWhepStream(id: id) != nil {
+ attachBufferedCamera(cameraId: id, scene: scene)
+ } else {
+ attachExternalCamera(scene: scene)
+ }
case .screenCapture:
attachBufferedCamera(cameraId: screenCaptureCameraId, scene: scene)
case .backTripleLowEnergy:
@@ -429,14 +440,24 @@ extension Model {
switch scene.videoSource.cameraPosition {
case .rtmp:
return activeBufferedVideoIds.contains(scene.videoSource.rtmpCameraId)
+ case .whip:
+ return activeBufferedVideoIds.contains(scene.videoSource.whipCameraId)
case .srtla:
return activeBufferedVideoIds.contains(scene.videoSource.srtlaCameraId)
case .rist:
return activeBufferedVideoIds.contains(scene.videoSource.ristCameraId)
case .rtsp:
return activeBufferedVideoIds.contains(scene.videoSource.rtspCameraId)
+ case .whep:
+ return activeBufferedVideoIds.contains(scene.videoSource.whepCameraId)
case .external:
- return isExternalCameraConnected(id: scene.videoSource.externalCameraId)
+ if let id = UUID(uuidString: scene.videoSource.externalCameraId),
+ getWhipStream(id: id) != nil || getWhepStream(id: id) != nil {
+ return activeBufferedVideoIds.contains(id)
+ } else {
+ return isExternalCameraConnected(id: scene.videoSource.externalCameraId)
+ }
default:
return true
}
@@ -604,15 +625,15 @@ extension Model {
}
private func getImageEffect(id: UUID) -> ImageEffect? {
- return imageEffects[id]
+ return imageEffects.first(where: { $0.key == id })?.value
}
private func getBrowserEffect(id: UUID) -> BrowserEffect? {
- return browserEffects[id]
+ return browserEffects.first(where: { $0.key == id })?.value
}
private func getMapEffect(id: UUID) -> MapEffect? {
- return mapEffects[id]
+ return mapEffects.first(where: { $0.key == id })?.value
}
private func resetVideoEffects(widgets: [SettingsWidget]) {
@@ -1129,7 +1150,7 @@ extension Model {
_ widget: SettingsWidget,
_ effects: inout [VideoEffect]
) {
- guard let effect = getScoreboardEffect(id: widget.id), !effects.contains(effect) else {
+ guard let effect = scoreboardEffects[widget.id], !effects.contains(effect) else {
return
}
effect.setSceneWidget(sceneWidget: sceneWidget.clone())
diff --git a/Moblin/Various/Model/ModelStream.swift b/Moblin/Various/Model/ModelStream.swift
index 9455250f6..2c98f1609 100644
--- a/Moblin/Various/Model/ModelStream.swift
+++ b/Moblin/Various/Model/ModelStream.swift
@@ -50,6 +50,7 @@ class CreateStreamWizard: ObservableObject {
@Published var customRtmpUrl = ""
@Published var customRtmpStreamKey = ""
@Published var customRistUrl = ""
+ @Published var customWhipUrl = ""
}
enum StreamState {
@@ -82,6 +83,24 @@ extension Model {
)
return
}
+ if stream.getProtocol() == .whip {
+ if stream.codec != .h264avc || stream.audioCodec != .opus {
+ makeErrorToast(
+ title: String(localized: "WHIP requires H.264 video and Opus audio."),
+ subTitle: String(
+ localized: "Update Settings → Streams → \(stream.name) → Video/Audio."
+ )
+ )
+ return
+ }
+ if stream.resolvedWhipEndpointUrl() == nil {
+ makeErrorToast(
+ title: String(localized: "Malformed WHIP URL"),
+ subTitle: String(localized: "Please use a valid whip:// URL.")
+ )
+ return
+ }
+ }
if database.location.resetWhenGoingLive {
resetLocationData()
}
@@ -189,6 +208,8 @@ extension Model {
startNetStreamSrt()
case .rist:
startNetStreamRist()
+ case .whip:
+ startNetStreamWhip()
}
updateSpeed(now: .now)
streamBecameBrokenTime = nil
@@ -234,12 +255,25 @@ extension Model {
updateAdaptiveBitrateRistIfEnabled()
}
+ private func startNetStreamWhip() {
+ guard let endpointUrl = stream.resolvedWhipEndpointUrl() else {
+ onDisconnected(reason: "WHIP endpoint URL invalid")
+ return
+ }
+ media.whipStartStream(
+ endpointUrl: endpointUrl,
+ settings: stream.whip,
+ videoDimensions: stream.dimensions()
+ )
+ }
+
func stopNetStream() {
moblink.streamer?.stopTunnels()
reconnectTimer.stop()
media.rtmpStopStream()
media.srtStopStream()
media.ristStopStream()
+ media.whipStopStream()
streamStartTime = nil
updateStreamUptime(now: .now)
updateSpeed(now: .now)
@@ -531,6 +565,14 @@ extension Model {
}
}
+ private func handleWhipConnected() {
+ onConnected()
+ }
+
+ private func handleWhipDisconnected(reason: String) {
+ onDisconnected(reason: reason)
+ }
+
private func handleAudioBuffer(sampleBuffer: CMSampleBuffer) {
DispatchQueue.main.async {
self.speechToText?.append(sampleBuffer: sampleBuffer)
@@ -878,6 +920,14 @@ extension Model: MediaDelegate {
handleRistDisconnected()
}
+ func mediaOnWhipConnected() {
+ handleWhipConnected()
+ }
+
+ func mediaOnWhipDisconnected(_ reason: String) {
+ handleWhipDisconnected(reason: reason)
+ }
+
func mediaOnAudioMuteChange() {
updateAudioLevel()
}
diff --git a/Moblin/Various/Model/ModelStreamWizard.swift b/Moblin/Various/Model/ModelStreamWizard.swift
index babc83f25..e3e673ad0 100644
--- a/Moblin/Various/Model/ModelStreamWizard.swift
+++ b/Moblin/Various/Model/ModelStreamWizard.swift
@@ -22,6 +22,7 @@ enum WizardCustomProtocol {
case srt
case rtmp
case rist
+ case whip
func toDefaultCodec() -> SettingsStreamCodec {
switch self {
@@ -33,6 +34,8 @@ enum WizardCustomProtocol {
return .h264avc
case .rist:
return .h265hevc
+ case .whip:
+ return .h264avc
}
}
}
@@ -74,6 +77,8 @@ extension Model {
return url.url?.absoluteString
case .rist:
return createStreamWizard.customRistUrl.trim()
+ case .whip:
+ return createStreamWizard.customWhipUrl.trim()
}
return nil
}
@@ -168,6 +173,9 @@ extension Model {
case .myServers:
stream.codec = createStreamWizard.customProtocol.toDefaultCodec()
}
+ if createStreamWizard.customProtocol == .whip {
+ stream.audioCodec = .opus
+ }
stream.audioBitrate = 128_000
database.streams.append(stream)
setCurrentStream(stream: stream)
@@ -201,6 +209,7 @@ extension Model {
createStreamWizard.directIngest = ""
createStreamWizard.directStreamKey = ""
createStreamWizard.belaboxUrl = ""
+ createStreamWizard.customWhipUrl = ""
}
func handleSettingsUrlsInWizard(settings: MoblinSettingsUrl) {
diff --git a/Moblin/Various/Model/ModelWhepClient.swift b/Moblin/Various/Model/ModelWhepClient.swift
new file mode 100644
index 000000000..23bf1e05a
--- /dev/null
+++ b/Moblin/Various/Model/ModelWhepClient.swift
@@ -0,0 +1,89 @@
+import CoreMedia
+import Foundation
+
+extension Model {
+ func whepCameras() -> [(UUID, String)] {
+ return database.whepClient.streams.map { stream in
+ (stream.id, stream.camera())
+ }
+ }
+
+ func getWhepStream(id: UUID) -> SettingsWhepClientStream? {
+ return database.whepClient.streams.first { stream in
+ stream.id == id
+ }
+ }
+
+ func getWhepStream(idString: String) -> SettingsWhepClientStream? {
+ return database.whepClient.streams.first { stream in
+ idString == stream.id.uuidString
+ }
+ }
+
+ func reloadWhepClient() {
+ stopWhepClient()
+ for stream in database.whepClient.streams where stream.enabled {
+ guard let url = URL(string: stream.url) else {
+ continue
+ }
+ let client = WhepClient(cameraId: stream.id, url: url, latency: stream.latencySeconds())
+ client.delegate = self
+ client.start()
+ ingests.whep.append(client)
+ }
+ }
+
+ func stopWhepClient() {
+ for client in ingests.whep {
+ client.stop()
+ }
+ ingests.whep = []
+ }
+
+ func whepClientConnectedInternal(cameraId: UUID) {
+ guard let stream = getWhepStream(id: cameraId) else {
+ return
+ }
+ let camera = stream.camera()
+ makeToast(title: String(localized: "\(camera) connected"))
+ media.addBufferedVideo(cameraId: cameraId, name: camera, latency: stream.latencySeconds())
+ media.addBufferedAudio(cameraId: cameraId, name: camera, latency: stream.latencySeconds())
+ }
+
+ func whepClientDisconnectedInternal(cameraId: UUID, reason: String) {
+ guard let stream = getWhepStream(id: cameraId) else {
+ return
+ }
+ makeToast(title: String(localized: "\(stream.camera()) disconnected"), subTitle: reason)
+ media.removeBufferedVideo(cameraId: cameraId)
+ media.removeBufferedAudio(cameraId: cameraId)
+ switchMicIfNeededAfterNetworkCameraChange()
+ }
+}
+
+extension Model: WhepClientDelegate {
+ func whepClientErrorToast(title: String) {
+ makeErrorToastMain(title: title)
+ }
+
+ func whepClientConnected(cameraId: UUID) {
+ DispatchQueue.main.async {
+ self.whepClientConnectedInternal(cameraId: cameraId)
+ }
+ }
+
+ func whepClientDisconnected(cameraId: UUID, reason: String) {
+ DispatchQueue.main.async {
+ self.whepClientDisconnectedInternal(cameraId: cameraId, reason: reason)
+ }
+ }
+
+ func whepClientOnVideoBuffer(cameraId: UUID, _ sampleBuffer: CMSampleBuffer) {
+ media.appendBufferedVideoSampleBuffer(cameraId: cameraId, sampleBuffer: sampleBuffer)
+ }
+
+ func whepClientOnAudioBuffer(cameraId: UUID, _ sampleBuffer: CMSampleBuffer) {
+ media.appendBufferedAudioSampleBuffer(cameraId: cameraId, sampleBuffer: sampleBuffer)
+ }
+}
+
diff --git a/Moblin/Various/Model/ModelWhipServer.swift b/Moblin/Various/Model/ModelWhipServer.swift
new file mode 100644
index 000000000..904c92b8b
--- /dev/null
+++ b/Moblin/Various/Model/ModelWhipServer.swift
@@ -0,0 +1,122 @@
+import AVFoundation
+import CoreMedia
+import Foundation
+
+extension Model {
+ func whipCameras() -> [(UUID, String)] {
+ return database.whipServer.streams.map { stream in
+ (stream.id, stream.camera())
+ }
+ }
+
+ func getWhipStream(id: UUID) -> SettingsWhipServerStream? {
+ return database.whipServer.streams.first { stream in
+ stream.id == id
+ }
+ }
+
+ func getWhipStream(idString: String) -> SettingsWhipServerStream? {
+ return database.whipServer.streams.first { stream in
+ idString == stream.id.uuidString
+ }
+ }
+
+ func getWhipStream(streamKey: String) -> SettingsWhipServerStream? {
+ return database.whipServer.streams.first { stream in
+ stream.streamKey == streamKey
+ }
+ }
+
+ func stopAllWhipStreams() {
+ for stream in database.whipServer.streams {
+ stopWhipServerStream(stream: stream, showToast: false)
+ }
+ }
+
+ func isWhipStreamConnected(streamKey: String) -> Bool {
+ return ingests.whip?.isStreamConnected(streamKey: streamKey) ?? false
+ }
+
+ func handleWhipServerPublishStart(streamKey: String) {
+ DispatchQueue.main.async {
+ guard let stream = self.getWhipStream(streamKey: streamKey) else {
+ return
+ }
+ let camera = stream.camera()
+ self.makeToast(title: String(localized: "\(camera) connected"))
+ // Cap latency for local WebRTC ingest. Values above 500ms cause audio buffer
+ // overflow and excessive video delay. Old saved settings may still have 2000ms.
+ let latency = min(Double(stream.latency) / 1000.0, 0.5)
+ self.media.addBufferedVideo(cameraId: stream.id, name: camera, latency: latency)
+ self.media.addBufferedAudio(cameraId: stream.id, name: camera, latency: latency)
+ }
+ }
+
+ func handleWhipServerPublishStop(streamKey: String, reason: String? = nil) {
+ DispatchQueue.main.async {
+ guard let stream = self.getWhipStream(streamKey: streamKey) else {
+ return
+ }
+ self.stopWhipServerStream(stream: stream, showToast: true, reason: reason)
+ self.switchMicIfNeededAfterNetworkCameraChange()
+ }
+ }
+
+ private func stopWhipServerStream(
+ stream: SettingsWhipServerStream,
+ showToast: Bool,
+ reason: String? = nil
+ ) {
+ if showToast {
+ makeToast(title: String(localized: "\(stream.camera()) disconnected"), subTitle: reason)
+ }
+ media.removeBufferedVideo(cameraId: stream.id)
+ media.removeBufferedAudio(cameraId: stream.id)
+ }
+
+ func handleWhipServerFrame(cameraId: UUID, sampleBuffer: CMSampleBuffer) {
+ media.appendBufferedVideoSampleBuffer(cameraId: cameraId, sampleBuffer: sampleBuffer)
+ }
+
+ func handleWhipServerAudioBuffer(cameraId: UUID, sampleBuffer: CMSampleBuffer) {
+ media.appendBufferedAudioSampleBuffer(cameraId: cameraId, sampleBuffer: sampleBuffer)
+ }
+
+ func stopWhipServer() {
+ ingests.whip?.stop()
+ ingests.whip = nil
+ stopAllWhipStreams()
+ }
+
+ func reloadWhipServer() {
+ stopWhipServer()
+ if database.whipServer.enabled {
+ ingests.whip = WhipServer(settings: database.whipServer.clone())
+ ingests.whip?.delegate = self
+ ingests.whip?.start()
+ }
+ }
+
+ func whipServerEnabled() -> Bool {
+ return database.whipServer.enabled
+ }
+}
+
+extension Model: WhipServerDelegate {
+ func whipServerOnPublishStart(streamKey: String) {
+ handleWhipServerPublishStart(streamKey: streamKey)
+ }
+
+ func whipServerOnPublishStop(streamKey: String, reason: String) {
+ handleWhipServerPublishStop(streamKey: streamKey, reason: reason)
+ }
+
+ func whipServerOnVideoBuffer(cameraId: UUID, _ sampleBuffer: CMSampleBuffer) {
+ handleWhipServerFrame(cameraId: cameraId, sampleBuffer: sampleBuffer)
+ }
+
+ func whipServerOnAudioBuffer(cameraId: UUID, _ sampleBuffer: CMSampleBuffer) {
+ handleWhipServerAudioBuffer(cameraId: cameraId, sampleBuffer: sampleBuffer)
+ }
+}
+
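A note on the latency clamp in handleWhipServerPublishStart above: stream latency is stored in milliseconds, the buffered video/audio pipeline takes seconds, and anything above 500 ms is capped per the comment in the patch. A standalone sketch of that conversion (the helper name is hypothetical, not from this patch):

```swift
// Sketch of the clamp applied on WHIP publish start (hypothetical helper name).
// Saved latency is milliseconds; buffered media latency is seconds, capped at 0.5 s.
func cappedWhipLatencySeconds(savedLatencyMs: Int32) -> Double {
    return min(Double(savedLatencyMs) / 1000.0, 0.5)
}

// cappedWhipLatencySeconds(savedLatencyMs: 200)  == 0.2
// cappedWhipLatencySeconds(savedLatencyMs: 2000) == 0.5  (old saved default, clamped)
```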
diff --git a/Moblin/Various/Network/HttpServer.swift b/Moblin/Various/Network/HttpServer.swift
index e8128470d..09eea893b 100644
--- a/Moblin/Various/Network/HttpServer.swift
+++ b/Moblin/Various/Network/HttpServer.swift
@@ -6,8 +6,7 @@ private struct HttpRequestParseResult {
let path: String
let version: String
let headers: [(String, String)]
- // periphery:ignore
- let data: Data
+ let body: Data
}
private class HttpRequestParser: HttpParser {
@@ -28,17 +27,34 @@ private class HttpRequestParser: HttpParser {
return (true, nil)
}
var headers: [(String, String)] = []
+ var contentLength: Int = 0
while let (line, nextLineOffset) = getLine(data: data, offset: offset) {
- let parts = line.lowercased().split(separator: " ")
- if parts.count == 2 {
- headers.append((String(parts[0]), String(parts[1])))
+ if let colonIndex = line.firstIndex(of: ":") {
+ let key = line[..<colonIndex].trimmingCharacters(in: .whitespaces).lowercased()
+ let value = line[line.index(after: colonIndex)...].trimmingCharacters(in: .whitespaces)
+ headers.append((key, value))
+ if key == "content-length" {
+ contentLength = Int(value) ?? 0
+ }
+ } else if line.isEmpty {
+ let body = data.subdata(in: nextLineOffset..<data.count)
+ if body.count >= contentLength {
+ return (true, HttpRequestParseResult(method: String(method),
+ path: String(path),
+ version: String(version),
+ headers: headers,
+ body: body.prefix(contentLength)))
+ }
+ return (false, nil)
}
offset = nextLineOffset
}
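The reworked parser frames the request body with Content-Length: headers end at the first empty line, and parsing only succeeds once the full body has been buffered (returning incomplete until then). A minimal standalone sketch of that framing rule, assuming lowercased header keys as stored above (this is a stand-in, not the app's actual parser):

```swift
import Foundation

// Minimal sketch of Content-Length framing (assumed stand-in for HttpRequestParser).
// Returns nil while the body is still incomplete, mirroring (false, nil) above.
func frameBody(headers: [(String, String)], bytesAfterHeaders: Data) -> Data? {
    let contentLength = headers
        .first { $0.0 == "content-length" }
        .flatMap { Int($0.1) } ?? 0
    guard bytesAfterHeaders.count >= contentLength else {
        return nil
    }
    return bytesAfterHeaders.prefix(contentLength)
}
```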
@@ -50,14 +66,15 @@ class HttpServerRequest {
let method: String
let path: String
let version: String
- // periphery:ignore
let headers: [(String, String)]
+ let body: Data
- fileprivate init(method: String, path: String, version: String, headers: [(String, String)]) {
+ fileprivate init(method: String, path: String, version: String, headers: [(String, String)], body: Data) {
self.method = method
self.path = path
self.version = version
self.headers = headers
+ self.body = body
}
fileprivate func getContentType() -> String {
@@ -78,18 +95,41 @@ class HttpServerRequest {
return "text/html"
}
}
+
+ func header(_ name: String) -> String? {
+ let key = name.lowercased()
+ return headers.first(where: { $0.0 == key })?.1
+ }
}
enum HttpServerStatus {
case ok
+ case created
+ case noContent
+ case badRequest
case notFound
+ case methodNotAllowed
+ case unsupportedMediaType
+ case internalServerError
func code() -> Int {
switch self {
case .ok:
return 200
+ case .created:
+ return 201
+ case .noContent:
+ return 204
+ case .badRequest:
+ return 400
case .notFound:
return 404
+ case .methodNotAllowed:
+ return 405
+ case .unsupportedMediaType:
+ return 415
+ case .internalServerError:
+ return 500
}
}
@@ -97,8 +137,20 @@ enum HttpServerStatus {
switch self {
case .ok:
return "OK"
+ case .created:
+ return "Created"
+ case .noContent:
+ return "No Content"
+ case .badRequest:
+ return "Bad Request"
case .notFound:
return "Not Found"
+ case .methodNotAllowed:
+ return "Method Not Allowed"
+ case .unsupportedMediaType:
+ return "Unsupported Media Type"
+ case .internalServerError:
+ return "Internal Server Error"
}
}
}
@@ -110,12 +162,22 @@ class HttpServerResponse {
self.connection = connection
}
- func send(data: Data, status: HttpServerStatus = .ok) {
- connection?.sendAndClose(status: status, content: data)
+ func send(
+ data: Data,
+ status: HttpServerStatus = .ok,
+ contentType: String? = nil,
+ headers: [(String, String)] = []
+ ) {
+ connection?.sendAndClose(status: status, contentType: contentType, headers: headers, content: data)
}
- func send(text: String, status: HttpServerStatus = .ok) {
- send(data: text.utf8Data, status: status)
+ func send(
+ text: String,
+ status: HttpServerStatus = .ok,
+ contentType: String? = nil,
+ headers: [(String, String)] = []
+ ) {
+ send(data: text.utf8Data, status: status, contentType: contentType, headers: headers)
}
}
@@ -156,22 +218,26 @@ private class HttpServerConnection {
request = HttpServerRequest(method: result.method,
path: result.path,
version: result.version,
- headers: result.headers)
+ headers: result.headers,
+ body: result.body)
guard let route = server.findRoute(request: request!) else {
- sendAndClose(status: .notFound, content: Data())
+ sendAndClose(status: .notFound, contentType: nil, headers: [], content: Data())
return
}
route.handler(request!, HttpServerResponse(connection: self))
}
- func sendAndClose(status: HttpServerStatus, content: Data) {
+ func sendAndClose(status: HttpServerStatus, contentType: String?, headers: [(String, String)], content: Data) {
guard let request else {
return
}
var lines: [String] = []
lines.append("\(request.version) \(status.code()) \(status.text())")
if !content.isEmpty {
- lines.append("Content-Type: \(request.getContentType())")
+ lines.append("Content-Type: \(contentType ?? request.getContentType())")
+ }
+ for header in headers {
+ lines.append("\(header.0): \(header.1)")
}
lines.append("Connection: close")
lines.append("")
@@ -198,7 +264,7 @@ class HttpServerRoute {
class HttpServer {
private let queue: DispatchQueue
- private let routes: [HttpServerRoute]
+ private var routes: [HttpServerRoute]
private var listener: NWListener?
private let retryTimer: SimpleTimer
private var port: NWEndpoint.Port = .http
@@ -210,6 +276,12 @@ class HttpServer {
retryTimer = SimpleTimer(queue: queue)
}
+ func setRoutes(_ routes: [HttpServerRoute]) {
+ queue.async {
+ self.routes = routes
+ }
+ }
+
func start(port: NWEndpoint.Port) {
logger.debug("http-server: Start")
queue.async {
diff --git a/Moblin/Various/Settings/Settings.swift b/Moblin/Various/Settings/Settings.swift
index 6246b8ee1..2df34d2d6 100644
--- a/Moblin/Various/Settings/Settings.swift
+++ b/Moblin/Various/Settings/Settings.swift
@@ -12,9 +12,11 @@ enum SettingsCameraId {
case back(id: String)
case front(id: String)
case rtmp(id: UUID)
+ case whip(id: UUID)
case srtla(id: UUID)
case rist(id: UUID)
case rtsp(id: UUID)
+ case whep(id: UUID)
case mediaPlayer(id: UUID)
case external(id: String, name: String)
case screenCapture
@@ -1107,6 +1109,7 @@ class Database: Codable, ObservableObject {
var quickButtonsGeneral: SettingsQuickButtons = .init()
@Published var quickButtons: [SettingsQuickButton] = []
var rtmpServer: SettingsRtmpServer = .init()
+ var whipServer: SettingsWhipServer = .init()
@Published var networkInterfaceNames: [SettingsNetworkInterfaceName] = []
@Published var lowBitrateWarning: Bool = true
@Published var vibrate: Bool = false
@@ -1158,6 +1161,7 @@ class Database: Codable, ObservableObject {
var ristServer: SettingsRistServer = .init()
var disconnectProtection: SettingsDisconnectProtection = .init()
var rtspClient: SettingsRtspClient = .init()
+ var whepClient: SettingsWhepClient = .init()
var navigation: SettingsNavigation = .init()
var wiFiAware: SettingsWiFiAware = .init()
var face: SettingsFace = .init()
@@ -1211,6 +1215,7 @@ class Database: Codable, ObservableObject {
quickButtons,
globalButtons,
rtmpServer,
+ whipServer,
networkInterfaceNames,
lowBitrateWarning,
vibrate,
@@ -1261,6 +1266,7 @@ class Database: Codable, ObservableObject {
ristServer,
disconnectProtection,
rtspClient,
+ whepClient,
navigation,
wiFiAware,
face,
@@ -1285,6 +1291,7 @@ class Database: Codable, ObservableObject {
try container.encode(.quickButtons, quickButtonsGeneral)
try container.encode(.globalButtons, quickButtons)
try container.encode(.rtmpServer, rtmpServer)
+ try container.encode(.whipServer, whipServer)
try container.encode(.networkInterfaceNames, networkInterfaceNames)
try container.encode(.lowBitrateWarning, lowBitrateWarning)
try container.encode(.vibrate, vibrate)
@@ -1335,6 +1342,7 @@ class Database: Codable, ObservableObject {
try container.encode(.ristServer, ristServer)
try container.encode(.disconnectProtection, disconnectProtection)
try container.encode(.rtspClient, rtspClient)
+ try container.encode(.whepClient, whepClient)
try container.encode(.navigation, navigation)
try container.encode(.wiFiAware, wiFiAware)
try container.encode(.face, face)
@@ -1365,6 +1373,7 @@ class Database: Codable, ObservableObject {
quickButtonsGeneral = container.decode(.quickButtons, SettingsQuickButtons.self, .init())
quickButtons = container.decode(.globalButtons, [SettingsQuickButton].self, [])
rtmpServer = container.decode(.rtmpServer, SettingsRtmpServer.self, .init())
+ whipServer = container.decode(.whipServer, SettingsWhipServer.self, .init())
networkInterfaceNames = container.decode(
.networkInterfaceNames,
[SettingsNetworkInterfaceName].self,
@@ -1444,6 +1453,7 @@ class Database: Codable, ObservableObject {
.init()
)
rtspClient = container.decode(.rtspClient, SettingsRtspClient.self, .init())
+ whepClient = container.decode(.whepClient, SettingsWhepClient.self, .init())
navigation = container.decode(.navigation, SettingsNavigation.self, .init())
wiFiAware = container.decode(.wiFiAware, SettingsWiFiAware.self, .init())
face = (try? container.decode(SettingsFace.self, forKey: .face)) ?? debug.faceToBeRemoved
@@ -1915,7 +1925,6 @@ private func addMissingDeepLinkQuickButtons(database: Database) {
let buttonExists = quickButtons.buttons.contains(where: { quickButton.type == $0.type })
if !buttonExists {
button.type = quickButton.type
- button.page = quickButton.page
quickButtons.buttons.append(button)
}
}
diff --git a/Moblin/Various/Settings/SettingsIngests.swift b/Moblin/Various/Settings/SettingsIngests.swift
index 2b01cc794..b4fe7f7aa 100644
--- a/Moblin/Various/Settings/SettingsIngests.swift
+++ b/Moblin/Various/Settings/SettingsIngests.swift
@@ -1,6 +1,7 @@
import Foundation
private let defaultRtmpLatency: Int32 = 2000
+private let defaultWhipLatency: Int32 = 200
class SettingsRtmpServerStream: Codable, Identifiable, ObservableObject, Named {
static let baseName = String(localized: "My stream")
@@ -86,6 +87,90 @@ class SettingsRtmpServer: Codable, ObservableObject {
}
}
+class SettingsWhipServerStream: Codable, Identifiable, ObservableObject, Named {
+ static let baseName = String(localized: "My stream")
+ var id: UUID = .init()
+ @Published var name: String = baseName
+ @Published var streamKey: String = ""
+ @Published var latency: Int32 = defaultWhipLatency
+
+ enum CodingKeys: CodingKey {
+ case id,
+ name,
+ streamKey,
+ latency
+ }
+
+ func encode(to encoder: Encoder) throws {
+ var container = encoder.container(keyedBy: CodingKeys.self)
+ try container.encode(.id, id)
+ try container.encode(.name, name)
+ try container.encode(.streamKey, streamKey)
+ try container.encode(.latency, latency)
+ }
+
+ init() {}
+
+ required init(from decoder: Decoder) throws {
+ let container = try decoder.container(keyedBy: CodingKeys.self)
+ id = container.decode(.id, UUID.self, .init())
+ name = container.decode(.name, String.self, Self.baseName)
+ streamKey = container.decode(.streamKey, String.self, "")
+ latency = container.decode(.latency, Int32.self, defaultWhipLatency)
+ }
+
+ func camera() -> String {
+ return "WHIP \(name)"
+ }
+
+ func clone() -> SettingsWhipServerStream {
+ let new = SettingsWhipServerStream()
+ new.id = id
+ new.name = name
+ new.streamKey = streamKey
+ new.latency = latency
+ return new
+ }
+}
+
+class SettingsWhipServer: Codable, ObservableObject {
+ @Published var enabled: Bool = false
+ @Published var port: UInt16 = 8080
+ @Published var streams: [SettingsWhipServerStream] = []
+
+ enum CodingKeys: CodingKey {
+ case enabled,
+ port,
+ streams
+ }
+
+ func encode(to encoder: Encoder) throws {
+ var container = encoder.container(keyedBy: CodingKeys.self)
+ try container.encode(.enabled, enabled)
+ try container.encode(.port, port)
+ try container.encode(.streams, streams)
+ }
+
+ init() {}
+
+ required init(from decoder: Decoder) throws {
+ let container = try decoder.container(keyedBy: CodingKeys.self)
+ enabled = container.decode(.enabled, Bool.self, false)
+ port = container.decode(.port, UInt16.self, 8080)
+ streams = container.decode(.streams, [SettingsWhipServerStream].self, [])
+ }
+
+ func clone() -> SettingsWhipServer {
+ let new = SettingsWhipServer()
+ new.enabled = enabled
+ new.port = port
+ for stream in streams {
+ new.streams.append(stream.clone())
+ }
+ return new
+ }
+}
+
class SettingsSrtlaServerStream: Codable, Identifiable, ObservableObject, Named {
static let baseName = String(localized: "My stream")
var id: UUID = .init()
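The decode(.key, Type.self, default) and encode(.key, value) calls used throughout these settings classes are Moblin container conveniences; a minimal assumed equivalent, for readers following the Codable plumbing (not the app's exact helpers):

```swift
// Assumed sketch of Moblin's KeyedDecodingContainer/KeyedEncodingContainer helpers:
// decode falls back to a default instead of throwing, encode forwards normally.
extension KeyedDecodingContainer {
    func decode<T: Decodable>(_ key: Key, _ type: T.Type, _ defaultValue: T) -> T {
        return (try? decodeIfPresent(type, forKey: key)) ?? defaultValue
    }
}

extension KeyedEncodingContainer {
    mutating func encode<T: Encodable>(_ key: Key, _ value: T) throws {
        try encode(value, forKey: key)
    }
}
```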
@@ -307,3 +392,68 @@ class SettingsRtspClient: Codable, ObservableObject {
streams = container.decode(.streams, [SettingsRtspClientStream].self, [])
}
}
+
+class SettingsWhepClientStream: Codable, Identifiable, ObservableObject, Named {
+ static let baseName = String(localized: "My stream")
+ var id: UUID = .init()
+ @Published var name: String = baseName
+ @Published var url: String = ""
+ @Published var enabled: Bool = false
+ @Published var latency: Int32 = 2000
+
+ enum CodingKeys: CodingKey {
+ case id,
+ name,
+ url,
+ enabled,
+ latency
+ }
+
+ func latencySeconds() -> Double {
+ return Double(latency) / 1000
+ }
+
+ func encode(to encoder: Encoder) throws {
+ var container = encoder.container(keyedBy: CodingKeys.self)
+ try container.encode(.id, id)
+ try container.encode(.name, name)
+ try container.encode(.url, url)
+ try container.encode(.enabled, enabled)
+ try container.encode(.latency, latency)
+ }
+
+ init() {}
+
+ required init(from decoder: Decoder) throws {
+ let container = try decoder.container(keyedBy: CodingKeys.self)
+ id = container.decode(.id, UUID.self, .init())
+ name = container.decode(.name, String.self, Self.baseName)
+ url = container.decode(.url, String.self, "")
+ enabled = container.decode(.enabled, Bool.self, false)
+ latency = container.decode(.latency, Int32.self, 2000)
+ }
+
+ func camera() -> String {
+ return whepCamera(name: name)
+ }
+}
+
+class SettingsWhepClient: Codable, ObservableObject {
+ @Published var streams: [SettingsWhepClientStream] = []
+
+ enum CodingKeys: CodingKey {
+ case streams
+ }
+
+ func encode(to encoder: Encoder) throws {
+ var container = encoder.container(keyedBy: CodingKeys.self)
+ try container.encode(.streams, streams)
+ }
+
+ init() {}
+
+ required init(from decoder: Decoder) throws {
+ let container = try decoder.container(keyedBy: CodingKeys.self)
+ streams = container.decode(.streams, [SettingsWhepClientStream].self, [])
+ }
+}
diff --git a/Moblin/Various/Settings/SettingsScene.swift b/Moblin/Various/Settings/SettingsScene.swift
index 4644548e1..66758760e 100644
--- a/Moblin/Various/Settings/SettingsScene.swift
+++ b/Moblin/Various/Settings/SettingsScene.swift
@@ -774,14 +774,14 @@ class SettingsWidgetCrop: Codable {
}
enum SettingsWidgetBrowserMode: String, Codable, CaseIterable {
- case periodicAudioAndVideo
- case audioAndVideoOnly
- case audioOnly
+ case periodicAudioAndVideo = "periodicAudioAndVideo"
+ case audioAndVideoOnly = "audioAndVideoOnly"
+ case audioOnly = "audioOnly"
func toString() -> String {
switch self {
case .periodicAudioAndVideo:
- return String(localized: "Periodic, audio and video")
+ return String(localized: "Periodic audio and video")
case .audioAndVideoOnly:
return String(localized: "Audio and video only")
case .audioOnly:
@@ -828,8 +828,8 @@ class SettingsWidgetBrowser: Codable, ObservableObject {
url = container.decode(.url, String.self, "")
width = container.decode(.width, Int.self, 500)
height = container.decode(.height, Int.self, 500)
- if let decodedMode = try? container.decode(SettingsWidgetBrowserMode.self, forKey: .mode) {
- mode = decodedMode
+ if let modeValue = try? container.decodeIfPresent(SettingsWidgetBrowserMode.self, forKey: .mode) {
+ mode = modeValue
} else {
let audioOnly = container.decode(.audioOnly, Bool.self, false)
mode = audioOnly ? .audioAndVideoOnly : .periodicAudioAndVideo
@@ -1503,9 +1503,11 @@ class SettingsWidgetVTuber: Codable, ObservableObject {
backCameraId,
frontCameraId,
rtmpCameraId,
+ whipCameraId,
srtlaCameraId,
ristCameraId,
rtspCameraId,
+ whepCameraId,
mediaPlayerCameraId,
externalCameraId,
externalCameraName,
@@ -1524,9 +1526,11 @@ class SettingsWidgetVTuber: Codable, ObservableObject {
try container.encode(.backCameraId, videoSource.backCameraId)
try container.encode(.frontCameraId, videoSource.frontCameraId)
try container.encode(.rtmpCameraId, videoSource.rtmpCameraId)
+ try container.encode(.whipCameraId, videoSource.whipCameraId)
try container.encode(.srtlaCameraId, videoSource.srtlaCameraId)
try container.encode(.ristCameraId, videoSource.ristCameraId)
try container.encode(.rtspCameraId, videoSource.rtspCameraId)
+ try container.encode(.whepCameraId, videoSource.whepCameraId)
try container.encode(.mediaPlayerCameraId, videoSource.mediaPlayerCameraId)
try container.encode(.externalCameraId, videoSource.externalCameraId)
try container.encode(.externalCameraName, videoSource.externalCameraName)
@@ -1543,9 +1547,11 @@ class SettingsWidgetVTuber: Codable, ObservableObject {
videoSource.backCameraId = decodeCameraId(container, .backCameraId, bestBackCameraId)
videoSource.frontCameraId = decodeCameraId(container, .frontCameraId, bestFrontCameraId)
videoSource.rtmpCameraId = container.decode(.rtmpCameraId, UUID.self, .init())
+ videoSource.whipCameraId = container.decode(.whipCameraId, UUID.self, .init())
videoSource.srtlaCameraId = container.decode(.srtlaCameraId, UUID.self, .init())
videoSource.ristCameraId = container.decode(.ristCameraId, UUID.self, .init())
videoSource.rtspCameraId = container.decode(.rtspCameraId, UUID.self, .init())
+ videoSource.whepCameraId = container.decode(.whepCameraId, UUID.self, .init())
videoSource.mediaPlayerCameraId = container.decode(.mediaPlayerCameraId, UUID.self, .init())
videoSource.externalCameraId = container.decode(.externalCameraId, String.self, "")
videoSource.externalCameraName = container.decode(.externalCameraName, String.self, "")
@@ -1576,9 +1582,11 @@ class SettingsWidgetPngTuber: Codable, ObservableObject {
backCameraId,
frontCameraId,
rtmpCameraId,
+ whipCameraId,
srtlaCameraId,
ristCameraId,
rtspCameraId,
+ whepCameraId,
mediaPlayerCameraId,
externalCameraId,
externalCameraName,
@@ -1595,9 +1603,11 @@ class SettingsWidgetPngTuber: Codable, ObservableObject {
try container.encode(.backCameraId, videoSource.backCameraId)
try container.encode(.frontCameraId, videoSource.frontCameraId)
try container.encode(.rtmpCameraId, videoSource.rtmpCameraId)
+ try container.encode(.whipCameraId, videoSource.whipCameraId)
try container.encode(.srtlaCameraId, videoSource.srtlaCameraId)
try container.encode(.ristCameraId, videoSource.ristCameraId)
try container.encode(.rtspCameraId, videoSource.rtspCameraId)
+ try container.encode(.whepCameraId, videoSource.whepCameraId)
try container.encode(.mediaPlayerCameraId, videoSource.mediaPlayerCameraId)
try container.encode(.externalCameraId, videoSource.externalCameraId)
try container.encode(.externalCameraName, videoSource.externalCameraName)
@@ -1612,9 +1622,11 @@ class SettingsWidgetPngTuber: Codable, ObservableObject {
videoSource.backCameraId = decodeCameraId(container, .backCameraId, bestBackCameraId)
videoSource.frontCameraId = decodeCameraId(container, .frontCameraId, bestFrontCameraId)
videoSource.rtmpCameraId = container.decode(.rtmpCameraId, UUID.self, .init())
+ videoSource.whipCameraId = container.decode(.whipCameraId, UUID.self, .init())
videoSource.srtlaCameraId = container.decode(.srtlaCameraId, UUID.self, .init())
videoSource.ristCameraId = container.decode(.ristCameraId, UUID.self, .init())
videoSource.rtspCameraId = container.decode(.rtspCameraId, UUID.self, .init())
+ videoSource.whepCameraId = container.decode(.whepCameraId, UUID.self, .init())
videoSource.mediaPlayerCameraId = container.decode(.mediaPlayerCameraId, UUID.self, .init())
videoSource.externalCameraId = container.decode(.externalCameraId, String.self, "")
videoSource.externalCameraName = container.decode(.externalCameraName, String.self, "")
@@ -2280,10 +2292,12 @@ enum SettingsSceneCameraPosition: String, Codable, CaseIterable {
case back = "Back"
case front = "Front"
case rtmp = "RTMP"
+ case whip = "WHIP"
case external = "External"
case srtla = "SRT(LA)"
case rist = "RIST"
case rtsp = "RTSP"
+ case whep = "WHEP"
case mediaPlayer = "Media player"
case screenCapture = "Screen capture"
case backTripleLowEnergy = "Back triple"
@@ -2314,9 +2328,11 @@ struct SettingsVideoSource {
var backCameraId: String = bestBackCameraId
var frontCameraId: String = bestFrontCameraId
var rtmpCameraId: UUID = .init()
+ var whipCameraId: UUID = .init()
var srtlaCameraId: UUID = .init()
var ristCameraId: UUID = .init()
var rtspCameraId: UUID = .init()
+ var whepCameraId: UUID = .init()
var mediaPlayerCameraId: UUID = .init()
var externalCameraId: String = ""
var externalCameraName: String = ""
@@ -2329,6 +2345,8 @@ struct SettingsVideoSource {
return .front(id: frontCameraId)
case .rtmp:
return .rtmp(id: rtmpCameraId)
+ case .whip:
+ return .whip(id: whipCameraId)
case .external:
return .external(id: externalCameraId, name: externalCameraName)
case .srtla:
@@ -2337,6 +2355,8 @@ struct SettingsVideoSource {
return .rist(id: ristCameraId)
case .rtsp:
return .rtsp(id: rtspCameraId)
+ case .whep:
+ return .whep(id: whepCameraId)
case .mediaPlayer:
return .mediaPlayer(id: mediaPlayerCameraId)
case .screenCapture:
@@ -2363,6 +2383,9 @@ struct SettingsVideoSource {
case let .rtmp(id: id):
cameraPosition = .rtmp
rtmpCameraId = id
+ case let .whip(id: id):
+ cameraPosition = .whip
+ whipCameraId = id
case let .srtla(id: id):
cameraPosition = .srtla
srtlaCameraId = id
@@ -2372,6 +2395,9 @@ struct SettingsVideoSource {
case let .rtsp(id: id):
cameraPosition = .rtsp
rtspCameraId = id
+ case let .whep(id: id):
+ cameraPosition = .whep
+ whepCameraId = id
case let .mediaPlayer(id: id):
cameraPosition = .mediaPlayer
mediaPlayerCameraId = id
@@ -2428,12 +2454,16 @@ struct SettingsVideoSource {
switch cameraPosition {
case .rtmp:
return cameraId == rtmpCameraId
+ case .whip:
+ return cameraId == whipCameraId
case .srtla:
return cameraId == srtlaCameraId
case .rist:
return cameraId == ristCameraId
case .rtsp:
return cameraId == rtspCameraId
+ case .whep:
+ return cameraId == whepCameraId
default:
return false
}
@@ -2462,9 +2492,11 @@ class SettingsWidgetVideoSource: Codable, ObservableObject {
backCameraId,
frontCameraId,
rtmpCameraId,
+ whipCameraId,
srtlaCameraId,
ristCameraId,
rtspCameraId,
+ whepCameraId,
mediaPlayerCameraId,
externalCameraId,
externalCameraName,
@@ -2492,9 +2524,11 @@ class SettingsWidgetVideoSource: Codable, ObservableObject {
try container.encode(.backCameraId, videoSource.backCameraId)
try container.encode(.frontCameraId, videoSource.frontCameraId)
try container.encode(.rtmpCameraId, videoSource.rtmpCameraId)
+ try container.encode(.whipCameraId, videoSource.whipCameraId)
try container.encode(.srtlaCameraId, videoSource.srtlaCameraId)
try container.encode(.ristCameraId, videoSource.ristCameraId)
try container.encode(.rtspCameraId, videoSource.rtspCameraId)
+ try container.encode(.whepCameraId, videoSource.whepCameraId)
try container.encode(.mediaPlayerCameraId, videoSource.mediaPlayerCameraId)
try container.encode(.externalCameraId, videoSource.externalCameraId)
try container.encode(.externalCameraName, videoSource.externalCameraName)
@@ -2518,9 +2552,11 @@ class SettingsWidgetVideoSource: Codable, ObservableObject {
videoSource.backCameraId = decodeCameraId(container, .backCameraId, bestBackCameraId)
videoSource.frontCameraId = decodeCameraId(container, .frontCameraId, bestFrontCameraId)
videoSource.rtmpCameraId = container.decode(.rtmpCameraId, UUID.self, .init())
+ videoSource.whipCameraId = container.decode(.whipCameraId, UUID.self, .init())
videoSource.srtlaCameraId = container.decode(.srtlaCameraId, UUID.self, .init())
videoSource.ristCameraId = container.decode(.ristCameraId, UUID.self, .init())
videoSource.rtspCameraId = container.decode(.rtspCameraId, UUID.self, .init())
+ videoSource.whepCameraId = container.decode(.whepCameraId, UUID.self, .init())
videoSource.mediaPlayerCameraId = container.decode(.mediaPlayerCameraId, UUID.self, .init())
videoSource.externalCameraId = container.decode(.externalCameraId, String.self, "")
videoSource.externalCameraName = container.decode(.externalCameraName, String.self, "")
@@ -2809,7 +2845,7 @@ class SettingsWidgetScoreboardClock: Codable, ObservableObject {
@Published var direction: SettingsWidgetGenericScoreboardClockDirection = .up
var minutes: Int = 0
var seconds: Int = 0
- @Published var isStopped: Bool = true
+ var isStopped: Bool = true
enum CodingKeys: CodingKey {
case maximum,
@@ -3250,9 +3286,11 @@ class SettingsScene: Codable, Identifiable, Equatable, ObservableObject, Named {
backCameraId,
frontCameraId,
rtmpCameraId,
+ whipCameraId,
srtlaCameraId,
ristCameraId,
rtspCameraId,
+ whepCameraId,
mediaPlayerCameraId,
externalCameraId,
externalCameraName,
@@ -3275,9 +3313,11 @@ class SettingsScene: Codable, Identifiable, Equatable, ObservableObject, Named {
try container.encode(.backCameraId, videoSource.backCameraId)
try container.encode(.frontCameraId, videoSource.frontCameraId)
try container.encode(.rtmpCameraId, videoSource.rtmpCameraId)
+ try container.encode(.whipCameraId, videoSource.whipCameraId)
try container.encode(.srtlaCameraId, videoSource.srtlaCameraId)
try container.encode(.ristCameraId, videoSource.ristCameraId)
try container.encode(.rtspCameraId, videoSource.rtspCameraId)
+ try container.encode(.whepCameraId, videoSource.whepCameraId)
try container.encode(.mediaPlayerCameraId, videoSource.mediaPlayerCameraId)
try container.encode(.externalCameraId, videoSource.externalCameraId)
try container.encode(.externalCameraName, videoSource.externalCameraName)
@@ -3304,9 +3344,11 @@ class SettingsScene: Codable, Identifiable, Equatable, ObservableObject, Named {
videoSource.backCameraId = decodeCameraId(container, .backCameraId, bestBackCameraId)
videoSource.frontCameraId = decodeCameraId(container, .frontCameraId, bestFrontCameraId)
videoSource.rtmpCameraId = container.decode(.rtmpCameraId, UUID.self, .init())
+ videoSource.whipCameraId = container.decode(.whipCameraId, UUID.self, .init())
videoSource.srtlaCameraId = container.decode(.srtlaCameraId, UUID.self, .init())
videoSource.ristCameraId = container.decode(.ristCameraId, UUID.self, .init())
videoSource.rtspCameraId = container.decode(.rtspCameraId, UUID.self, .init())
+ videoSource.whepCameraId = container.decode(.whepCameraId, UUID.self, .init())
videoSource.mediaPlayerCameraId = container.decode(.mediaPlayerCameraId, UUID.self, .init())
videoSource.externalCameraId = container.decode(.externalCameraId, String.self, "")
videoSource.externalCameraName = container.decode(.externalCameraName, String.self, "")
diff --git a/Moblin/Various/Settings/SettingsStream.swift b/Moblin/Various/Settings/SettingsStream.swift
index 6c09e3517..758c466cb 100644
--- a/Moblin/Various/Settings/SettingsStream.swift
+++ b/Moblin/Various/Settings/SettingsStream.swift
@@ -151,6 +151,7 @@ enum SettingsStreamProtocol: String, Codable {
case rtmp = "RTMP"
case srt = "SRT"
case rist = "RIST"
+ case whip = "WHIP"
init(from decoder: Decoder) throws {
self = try SettingsStreamProtocol(rawValue: decoder.singleValueContainer().decode(RawValue.self)) ??
@@ -164,6 +165,7 @@ enum SettingsStreamDetailedProtocol {
case srt
case srtla
case rist
+ case whip
}
class SettingsStreamSrtConnectionPriority: Codable, Identifiable {
@@ -548,6 +550,20 @@ class SettingsStreamRist: Codable {
}
}
+class SettingsStreamWhip: Codable {
+ var iceServers: [String] = []
+ var maxRetryCount: Int = 0
+ var insecureHttpAllowed: Bool = false
+
+ func clone() -> SettingsStreamWhip {
+ let new = SettingsStreamWhip()
+ new.iceServers = iceServers
+ new.maxRetryCount = maxRetryCount
+ new.insecureHttpAllowed = insecureHttpAllowed
+ return new
+ }
+}
+
class SettingsStreamChat: Codable {
var bttvEmotes: Bool = false
var ffzEmotes: Bool = false
@@ -1028,6 +1044,7 @@ class SettingsStream: Codable, Identifiable, Equatable, ObservableObject, Named
var srt: SettingsStreamSrt = .init()
var rtmp: SettingsStreamRtmp = .init()
var rist: SettingsStreamRist = .init()
+ var whip: SettingsStreamWhip = .init()
@Published var maxKeyFrameInterval: Int32 = 2
@Published var audioCodec: SettingsStreamAudioCodec = .aac
var audioBitrate: Int = 128_000
@@ -1113,6 +1130,7 @@ class SettingsStream: Codable, Identifiable, Equatable, ObservableObject, Named
srt,
rtmp,
rist,
+ whip,
captureSessionPresetEnabled,
captureSessionPreset,
maxKeyFrameInterval,
@@ -1197,6 +1215,7 @@ class SettingsStream: Codable, Identifiable, Equatable, ObservableObject, Named
try container.encode(.srt, srt)
try container.encode(.rtmp, rtmp)
try container.encode(.rist, rist)
+ try container.encode(.whip, whip)
try container.encode(.maxKeyFrameInterval, maxKeyFrameInterval)
try container.encode(.audioCodec, audioCodec)
try container.encode(.audioBitrate, audioBitrate)
@@ -1290,6 +1309,7 @@ class SettingsStream: Codable, Identifiable, Equatable, ObservableObject, Named
srt = container.decode(.srt, SettingsStreamSrt.self, .init())
rtmp = container.decode(.rtmp, SettingsStreamRtmp.self, .init())
rist = container.decode(.rist, SettingsStreamRist.self, .init())
+ whip = container.decode(.whip, SettingsStreamWhip.self, .init())
maxKeyFrameInterval = container.decode(.maxKeyFrameInterval, Int32.self, 2)
audioCodec = container.decode(.audioCodec, SettingsStreamAudioCodec.self, .aac)
audioBitrate = container.decode(.audioBitrate, Int.self, 128_000)
@@ -1374,6 +1394,7 @@ class SettingsStream: Codable, Identifiable, Equatable, ObservableObject, Named
new.srt = srt.clone()
new.rtmp = rtmp.clone()
new.rist = rist.clone()
+ new.whip = whip.clone()
new.maxKeyFrameInterval = maxKeyFrameInterval
new.audioCodec = audioCodec
new.audioBitrate = audioBitrate
@@ -1404,12 +1425,20 @@ class SettingsStream: Codable, Identifiable, Equatable, ObservableObject, Named
return .rtmp
case "rtmps":
return .rtmp
+ case "http":
+ return .whip
+ case "https":
+ return .whip
case "srt":
return .srt
case "srtla":
return .srt
case "rist":
return .rist
+ case "whip":
+ return .whip
+ case "whips":
+ return .whip
default:
return .rtmp
}
@@ -1421,17 +1450,43 @@ class SettingsStream: Codable, Identifiable, Equatable, ObservableObject, Named
return .rtmp
case "rtmps":
return .rtmps
+ case "http":
+ return .whip
+ case "https":
+ return .whip
case "srt":
return .srt
case "srtla":
return .srtla
case "rist":
return .rist
+ case "whip":
+ return .whip
+ case "whips":
+ return .whip
default:
return .rtmp
}
}
+ func resolvedWhipEndpointUrl() -> URL? {
+ guard getProtocol() == .whip else {
+ return nil
+ }
+ guard var components = URLComponents(string: url) else {
+ return nil
+ }
+ switch components.scheme {
+ case "whip", "whips":
+ components.scheme = "https"
+ return components.url
+ case "http", "https":
+ return components.url
+ default:
+ return nil
+ }
+ }
+
func protocolString() -> String {
if getProtocol() == .srt && isSrtla() {
return "SRTLA"
diff --git a/Moblin/View/Settings/Ingests/IngestsSettingsView.swift b/Moblin/View/Settings/Ingests/IngestsSettingsView.swift
index f0f8b9ff7..14ff283ae 100644
--- a/Moblin/View/Settings/Ingests/IngestsSettingsView.swift
+++ b/Moblin/View/Settings/Ingests/IngestsSettingsView.swift
@@ -8,9 +8,11 @@ struct IngestsSettingsView: View {
Form {
Section {
RtmpServerSettingsView(rtmpServer: database.rtmpServer)
+ WhipServerSettingsView(whipServer: database.whipServer)
SrtlaServerSettingsView(srtlaServer: database.srtlaServer)
RistServerSettingsView(ristServer: database.ristServer)
RtspClientSettingsView(rtspClient: database.rtspClient)
+ WhepClientSettingsView(whepClient: database.whepClient)
if #available(iOS 26, *), false {
NavigationLink {
WiFiAwareSettingsView(model: model, wiFiAware: database.wiFiAware)
diff --git a/Moblin/View/Settings/Streams/Stream/StreamSettingsView.swift b/Moblin/View/Settings/Streams/Stream/StreamSettingsView.swift
index ff729a036..683f95a94 100644
--- a/Moblin/View/Settings/Streams/Stream/StreamSettingsView.swift
+++ b/Moblin/View/Settings/Streams/Stream/StreamSettingsView.swift
@@ -270,6 +270,12 @@ struct StreamSettingsView: View {
} label: {
Text("RIST")
}
+ case .whip:
+ NavigationLink {
+ StreamWhipSettingsView(stream: stream)
+ } label: {
+ Text("WHIP")
+ }
}
}
} header: {
diff --git a/Moblin/View/Settings/Streams/Stream/Url/StreamUrlSettingsView.swift b/Moblin/View/Settings/Streams/Stream/Url/StreamUrlSettingsView.swift
index c2af6c43c..8d3f38c30 100644
--- a/Moblin/View/Settings/Streams/Stream/Url/StreamUrlSettingsView.swift
+++ b/Moblin/View/Settings/Streams/Stream/Url/StreamUrlSettingsView.swift
@@ -108,6 +108,22 @@ private struct SrtHelpView: View {
}
}
+private struct WhipHelpView: View {
+ var body: some View {
+ Section {
+ VStack(alignment: .leading) {
+ Text("Template: https://my_domain/my_endpoint")
+ Text("Example: https://example.com/live/whip")
+ Text("Example: http://192.168.1.50:8080/live/whip")
+ Text("")
+ Text("WHIP uses HTTP/HTTPS endpoints. (whip:// is also accepted for compatibility.)")
+ }
+ } header: {
+ Text("WHIP")
+ }
+ }
+}
+
private struct UrlSettingsView: View {
@EnvironmentObject var model: Model
@Environment(\.dismiss) var dismiss
@@ -172,6 +188,7 @@ private struct UrlSettingsView: View {
RtmpHelpView(stream: stream)
if showSrtHelp {
SrtHelpView()
+ WhipHelpView()
}
}
.navigationTitle("Help")
diff --git a/Moblin/View/Settings/Streams/Stream/Whip/StreamWhipSettingsView.swift b/Moblin/View/Settings/Streams/Stream/Whip/StreamWhipSettingsView.swift
new file mode 100644
index 000000000..02a0543d1
--- /dev/null
+++ b/Moblin/View/Settings/Streams/Stream/Whip/StreamWhipSettingsView.swift
@@ -0,0 +1,60 @@
+import SwiftUI
+
+struct StreamWhipSettingsView: View {
+ @EnvironmentObject var model: Model
+ let stream: SettingsStream
+
+ private func iceServersString() -> String {
+ stream.whip.iceServers.joined(separator: "\n")
+ }
+
+ private func parseIceServers(_ value: String) -> [String] {
+ value
+ .split(whereSeparator: { $0 == "\n" || $0 == "," })
+ .map { String($0).trim() }
+ .filter { !$0.isEmpty }
+ }
+
+ var body: some View {
+ Form {
+ Section {
+ MultiLineTextFieldNavigationView(
+ title: String(localized: "ICE servers"),
+ value: iceServersString(),
+ onSubmit: { value in
+ stream.whip.iceServers = parseIceServers(value)
+ model.reloadStreamIfEnabled(stream: stream)
+ },
+ footers: [
+ String(localized: "Enter STUN/TURN URLs, one per line."),
+ String(localized: "Example: stun:stun.l.google.com:19302"),
+ String(localized: "Note: Custom ICE servers may be ignored depending on WHIP backend."),
+ ]
+ )
+
+ NavigationLink {
+ TextEditView(
+ title: String(localized: "Max retries"),
+ value: String(stream.whip.maxRetryCount),
+ keyboardType: .numberPad
+ ) { value in
+ guard let retry = Int(value), retry >= 0, retry <= 20 else {
+ return
+ }
+ stream.whip.maxRetryCount = retry
+ model.reloadStreamIfEnabled(stream: stream)
+ }
+ } label: {
+ TextItemView(
+ name: String(localized: "Max retries"),
+ value: String(stream.whip.maxRetryCount),
+ color: .gray
+ )
+ }
+ .disabled(stream.enabled && model.isLive)
+ }
+ }
+ .navigationTitle("WHIP")
+ }
+}
+
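The ICE-server field accepts one URL per line but also tolerates commas. A standalone sketch of parseIceServers, using Foundation trimming in place of Moblin's String.trim() (assumed to behave like trimmingCharacters(in: .whitespaces)):

```swift
import Foundation

// Standalone sketch of the parseIceServers logic above.
func parseIceServers(_ value: String) -> [String] {
    return value
        .split(whereSeparator: { $0 == "\n" || $0 == "," })
        .map { $0.trimmingCharacters(in: .whitespaces) }
        .filter { !$0.isEmpty }
}

// parseIceServers("stun:stun.l.google.com:19302\nturn:turn.example.com:3478")
// == parseIceServers("stun:stun.l.google.com:19302, turn:turn.example.com:3478")
// == ["stun:stun.l.google.com:19302", "turn:turn.example.com:3478"]
```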
diff --git a/Moblin/View/Settings/Streams/Stream/Wizard/Custom/StreamWizardCustomSettingsView.swift b/Moblin/View/Settings/Streams/Stream/Wizard/Custom/StreamWizardCustomSettingsView.swift
index e86c4567f..74a37674d 100644
--- a/Moblin/View/Settings/Streams/Stream/Wizard/Custom/StreamWizardCustomSettingsView.swift
+++ b/Moblin/View/Settings/Streams/Stream/Wizard/Custom/StreamWizardCustomSettingsView.swift
@@ -22,6 +22,11 @@ struct StreamWizardCustomSettingsView: View {
} label: {
Text("RIST")
}
+ NavigationLink {
+ StreamWizardCustomWhipSettingsView(model: model, createStreamWizard: createStreamWizard)
+ } label: {
+ Text("WHIP")
+ }
} header: {
Text("Protocol")
}
diff --git a/Moblin/View/Settings/Streams/Stream/Wizard/Custom/StreamWizardCustomWhipSettingsView.swift b/Moblin/View/Settings/Streams/Stream/Wizard/Custom/StreamWizardCustomWhipSettingsView.swift
new file mode 100644
index 000000000..b4497c9cf
--- /dev/null
+++ b/Moblin/View/Settings/Streams/Stream/Wizard/Custom/StreamWizardCustomWhipSettingsView.swift
@@ -0,0 +1,58 @@
+import SwiftUI
+
+struct StreamWizardCustomWhipSettingsView: View {
+ let model: Model
+ @ObservedObject var createStreamWizard: CreateStreamWizard
+ @State var urlError = ""
+
+ private func nextDisabled() -> Bool {
+ return createStreamWizard.customWhipUrl.isEmpty || !urlError.isEmpty
+ }
+
+ private func updateUrlError() {
+ let url = cleanUrl(url: createStreamWizard.customWhipUrl)
+ if url.isEmpty {
+ urlError = ""
+ } else {
+ urlError = isValidUrl(url: url, allowedSchemes: ["http", "https", "whip", "whips"]) ?? ""
+ }
+ }
+
+ var body: some View {
+ Form {
+ Section {
+ TextField("https://example.com/live/whip", text: $createStreamWizard.customWhipUrl)
+ .textInputAutocapitalization(.never)
+ .disableAutocorrection(true)
+ .onChange(of: createStreamWizard.customWhipUrl) { _ in
+ updateUrlError()
+ }
+ } header: {
+ Text("Url")
+ } footer: {
+ FormFieldError(error: urlError)
+ }
+
+ Section {
+ NavigationLink {
+ StreamWizardGeneralSettingsView(model: model, createStreamWizard: createStreamWizard)
+ } label: {
+ WizardNextButtonView()
+ }
+ .disabled(nextDisabled())
+ }
+ }
+ .onAppear {
+ createStreamWizard.customProtocol = .whip
+ createStreamWizard.name = makeUniqueName(
+ name: String(localized: "Custom WHIP"),
+ existingNames: model.database.streams
+ )
+ }
+ .navigationTitle("WHIP")
+ .toolbar {
+ CreateStreamWizardToolbar(createStreamWizard: createStreamWizard)
+ }
+ }
+}
+
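The wizard defers to Moblin's isValidUrl helper with a scheme allow-list; an assumed stand-in showing the shape of that check (error string on failure, nil on success — the real helper's internals are not in this diff):

```swift
import Foundation

// Assumed stand-in for the isValidUrl(url:allowedSchemes:) call above,
// returning an error message or nil like the real helper appears to.
func validateWhipWizardUrl(_ url: String) -> String? {
    guard let components = URLComponents(string: url),
          let scheme = components.scheme,
          ["http", "https", "whip", "whips"].contains(scheme),
          components.host?.isEmpty == false
    else {
        return "Malformed WHIP URL"
    }
    return nil
}

// validateWhipWizardUrl("https://example.com/live/whip") -> nil (valid)
// validateWhipWizardUrl("ftp://example.com/whip")        -> "Malformed WHIP URL"
```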
diff --git a/Moblin/View/Settings/WhepClient/WhepClientSettingsView.swift b/Moblin/View/Settings/WhepClient/WhepClientSettingsView.swift
new file mode 100644
index 000000000..9d65a811d
--- /dev/null
+++ b/Moblin/View/Settings/WhepClient/WhepClientSettingsView.swift
@@ -0,0 +1,50 @@
+import SwiftUI
+
+struct WhepClientSettingsView: View {
+ @EnvironmentObject var model: Model
+ @ObservedObject var whepClient: SettingsWhepClient
+ @State var numberOfEnabledStreams: Int = 0
+
+ private func status() -> String {
+ return String(numberOfEnabledStreams)
+ }
+
+ var body: some View {
+ NavigationLink {
+ Form {
+ Section {
+ List {
+ ForEach(whepClient.streams) { stream in
+ WhepClientStreamSettingsView(whepClient: whepClient, stream: stream)
+ }
+ .onDelete { indexes in
+ whepClient.streams.remove(atOffsets: indexes)
+ model.reloadWhepClient()
+ }
+ }
+ CreateButtonView {
+ let stream = SettingsWhepClientStream()
+ stream.name = makeUniqueName(name: SettingsWhepClientStream.baseName,
+ existingNames: whepClient.streams)
+ whepClient.streams.append(stream)
+ }
+ } header: {
+ Text("Streams")
+ } footer: {
+ SwipeLeftToDeleteHelpView(kind: String(localized: "a stream"))
+ }
+ }
+ .navigationTitle("WHEP client")
+ } label: {
+ HStack {
+ Text("WHEP client")
+ Spacer()
+ GrayTextView(text: status())
+ }
+ }
+ .onAppear {
+ numberOfEnabledStreams = whepClient.streams.filter { $0.enabled }.count
+ }
+ }
+}
+
diff --git a/Moblin/View/Settings/WhepClient/WhepClientStreamSettingsView.swift b/Moblin/View/Settings/WhepClient/WhepClientStreamSettingsView.swift
new file mode 100644
index 000000000..9b1133892
--- /dev/null
+++ b/Moblin/View/Settings/WhepClient/WhepClientStreamSettingsView.swift
@@ -0,0 +1,73 @@
+import SwiftUI
+
+struct WhepClientStreamSettingsView: View {
+ @EnvironmentObject var model: Model
+ @ObservedObject var whepClient: SettingsWhepClient
+ @ObservedObject var stream: SettingsWhepClientStream
+
+ var body: some View {
+ NavigationLink {
+ Form {
+ Section {
+ NameEditView(name: $stream.name, existingNames: whepClient.streams)
+ }
+ Section {
+ TextEditNavigationView(
+ title: String(localized: "URL"),
+ value: stream.url,
+ onSubmit: {
+ stream.url = $0
+ model.reloadWhepClient()
+ },
+ footers: [
+ "https://example.com/whep/myStream",
+ "http://192.168.1.10:8080/whep/myStream",
+ ],
+ placeholder: "https://example.com/whep/myStream"
+ )
+ }
+ Section {
+ TextEditNavigationView(
+ title: String(localized: "Latency"),
+ value: String(stream.latency),
+ onChange: {
+ guard let latency = Int32($0) else {
+ return String(localized: "Not a number")
+ }
+ guard latency >= 250 else {
+ return String(localized: "Too small")
+ }
+ guard latency <= 10000 else {
+ return String(localized: "Too big")
+ }
+ return nil
+ },
+ onSubmit: {
+ guard let latency = Int32($0) else {
+ return
+ }
+ stream.latency = latency
+ model.reloadWhepClient()
+ },
+ footers: [String(localized: "250 or more milliseconds. 2000 ms by default.")],
+ keyboardType: .numbersAndPunctuation,
+ valueFormat: { "\($0) ms" }
+ )
+ } footer: {
+ Text("The higher, the lower risk of stuttering.")
+ }
+ }
+ .navigationTitle("Stream")
+ } label: {
+ Toggle(isOn: $stream.enabled) {
+ HStack {
+ Text(stream.name)
+ }
+ }
+ .onChange(of: stream.enabled) { _ in
+ model.reloadWhepClient()
+ }
+ }
+ }
+}
+
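For orientation, the URL configured above points at a WHEP endpoint. Moblin's actual WHEP client is implemented outside this diff; the sketch below only illustrates the HTTP half of the protocol — the player POSTs an SDP offer as `application/sdp` and, per the WHEP spec, receives a `201 Created` carrying the SDP answer plus a `Location` header naming the session resource (which is later DELETEd to tear the session down).

```swift
import Foundation

// Minimal sketch of the WHEP handshake the configured URL is used for
// (not Moblin's actual client): POST an SDP offer, read the SDP answer
// and the session resource from the response.
func sendWhepOffer(endpoint: URL, sdpOffer: String) async throws -> (answer: String, session: String?) {
    var request = URLRequest(url: endpoint)
    request.httpMethod = "POST"
    request.setValue("application/sdp", forHTTPHeaderField: "Content-Type")
    request.httpBody = sdpOffer.data(using: .utf8)
    let (data, response) = try await URLSession.shared.data(for: request)
    guard let http = response as? HTTPURLResponse, http.statusCode == 201 else {
        throw URLError(.badServerResponse)
    }
    let answer = String(decoding: data, as: UTF8.self)
    // The Location header identifies the session resource; DELETE it to hang up.
    let session = http.value(forHTTPHeaderField: "Location")
    return (answer, session)
}
```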
diff --git a/Moblin/View/Settings/WhipServer/WhipServerSettingsView.swift b/Moblin/View/Settings/WhipServer/WhipServerSettingsView.swift
new file mode 100644
index 000000000..51f3391e1
--- /dev/null
+++ b/Moblin/View/Settings/WhipServer/WhipServerSettingsView.swift
@@ -0,0 +1,111 @@
+import SwiftUI
+
+struct WhipServerSettingsView: View {
+ @EnvironmentObject var model: Model
+ @ObservedObject var whipServer: SettingsWhipServer
+
+ private func submitPort(value: String) {
+ guard let port = UInt16(value) else {
+ return
+ }
+ whipServer.port = port
+ model.reloadWhipServer()
+ }
+
+ private func status() -> String {
+ if whipServer.enabled {
+ return String(whipServer.streams.count)
+ } else {
+ return "0"
+ }
+ }
+
+ var body: some View {
+ NavigationLink {
+ Form {
+ Section {
+ Text("""
+ The WHIP server allows Moblin to receive video streams over the network using WebRTC (WHIP).
+ """)
+ }
+ Section {
+ Toggle("Enabled", isOn: $whipServer.enabled)
+ .onChange(of: whipServer.enabled) { _ in
+ model.reloadWhipServer()
+ }
+ }
+ if whipServer.enabled {
+ Section {
+ HStack {
+ Image(systemName: "info.circle.fill")
+ .foregroundStyle(.blue)
+ Text("Disable the WHIP server to change its settings.")
+ }
+ }
+ }
+ Section {
+ TextEditNavigationView(
+ title: String(localized: "Port"),
+ value: String(whipServer.port),
+ onChange: isValidPort,
+ onSubmit: submitPort,
+ keyboardType: .numbersAndPunctuation
+ )
+ .disabled(whipServer.enabled)
+ } footer: {
+ Text("The TCP port the WHIP server listens for publishers on.")
+ }
+ Section {
+ List {
+ let list = ForEach(whipServer.streams) { stream in
+ WhipServerStreamSettingsView(
+ status: model.statusOther,
+ whipServer: whipServer,
+ stream: stream
+ )
+ }
+ if !whipServer.enabled {
+ list.onDelete { indexes in
+ whipServer.streams.remove(atOffsets: indexes)
+ model.reloadWhipServer()
+ model.updateMicsListAsync()
+ }
+ } else {
+ list
+ }
+ }
+ CreateButtonView {
+ let stream = SettingsWhipServerStream()
+ stream.name = makeUniqueName(name: SettingsWhipServerStream.baseName,
+ existingNames: whipServer.streams)
+ while true {
+ stream.streamKey = randomHumanString()
+ if model.getWhipStream(streamKey: stream.streamKey) == nil {
+ break
+ }
+ }
+ whipServer.streams.append(stream)
+ model.updateMicsListAsync()
+ }
+ .disabled(model.whipServerEnabled())
+ } header: {
+ Text("Streams")
+ } footer: {
+ VStack(alignment: .leading) {
+ Text("Each stream can receive video from one WHIP publisher on the local network.")
+ Text("")
+ SwipeLeftToDeleteHelpView(kind: String(localized: "a stream"))
+ }
+ }
+ }
+ .navigationTitle("WHIP server")
+ } label: {
+ HStack {
+ Text("WHIP server")
+ Spacer()
+ GrayTextView(text: status())
+ }
+ }
+ }
+}
+
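The port field above delegates validation to `isValidPort`, which is defined elsewhere in Moblin. A minimal sketch of such a validator, assuming the same `String?` error-message convention the latency validators in this diff use (the message text here is made up):

```swift
// Hypothetical port validator in the style of the onChange validators above;
// the real isValidPort is defined elsewhere in Moblin.
func isValidPortSketch(value: String) -> String? {
    guard let port = UInt16(value), port > 0 else {
        return String(localized: "Must be a number between 1 and 65535")
    }
    return nil // nil means the value is acceptable
}
```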
diff --git a/Moblin/View/Settings/WhipServer/WhipServerStreamSettingsView.swift b/Moblin/View/Settings/WhipServer/WhipServerStreamSettingsView.swift
new file mode 100644
index 000000000..e92fc9c74
--- /dev/null
+++ b/Moblin/View/Settings/WhipServer/WhipServerStreamSettingsView.swift
@@ -0,0 +1,124 @@
+import Network
+import SwiftUI
+
+private struct UrlsView: View {
+ @ObservedObject var status: StatusOther
+ let port: UInt16
+ let streamKey: String
+
+ private func formatUrl(ip: String) -> String {
+ return "http://\(ip):\(port)/whip/\(streamKey)"
+ }
+
+ var body: some View {
+ NavigationLink {
+ Form {
+ UrlsIpv4View(status: status, formatUrl: formatUrl)
+ UrlsIpv6View(status: status, formatUrl: formatUrl)
+ }
+ .navigationTitle("URLs")
+ } label: {
+ Text("URLs")
+ }
+ }
+}
+
+struct WhipServerStreamSettingsView: View {
+ @EnvironmentObject var model: Model
+ @ObservedObject var status: StatusOther
+ @ObservedObject var whipServer: SettingsWhipServer
+ @ObservedObject var stream: SettingsWhipServerStream
+
+ private func changeStreamKey(value: String) -> String? {
+ if model.getWhipStream(streamKey: value.trim()) == nil {
+ return nil
+ }
+ return String(localized: "Already in use")
+ }
+
+ private func submitStreamKey(value: String) {
+ let streamKey = value.trim()
+ if model.getWhipStream(streamKey: streamKey) != nil {
+ return
+ }
+ stream.streamKey = streamKey
+ }
+
+ private func changeLatency(value: String) -> String? {
+ guard let latency = Int32(value) else {
+ return String(localized: "Not a number")
+ }
+ guard latency >= 250 else {
+ return String(localized: "Too small")
+ }
+ guard latency <= 10000 else {
+ return String(localized: "Too big")
+ }
+ return nil
+ }
+
+ private func submitLatency(value: String) {
+ guard let latency = Int32(value) else {
+ return
+ }
+ stream.latency = latency
+ }
+
+ var body: some View {
+ NavigationLink {
+ Form {
+ Section {
+ NameEditView(name: $stream.name, existingNames: whipServer.streams)
+ .disabled(model.whipServerEnabled())
+ TextEditNavigationView(
+ title: String(localized: "Stream key"),
+ value: stream.streamKey,
+ onChange: changeStreamKey,
+ onSubmit: submitStreamKey
+ )
+ .disabled(model.whipServerEnabled())
+ } footer: {
+ Text("The stream name is shown in the list of cameras in scene settings.")
+ }
+ Section {
+ TextEditNavigationView(
+ title: String(localized: "Latency"),
+ value: String(stream.latency),
+ onChange: changeLatency,
+ onSubmit: submitLatency,
+ footers: [String(localized: "250 or more milliseconds. 2000 ms by default.")],
+ keyboardType: .numbersAndPunctuation,
+ valueFormat: { "\($0) ms" }
+ )
+ .disabled(model.whipServerEnabled())
+ } footer: {
+ Text("The higher, the lower risk of stuttering.")
+ }
+ Section {
+ UrlsView(status: status, port: whipServer.port, streamKey: stream.streamKey)
+ } header: {
+ Text("Publish URLs")
+ } footer: {
+ VStack(alignment: .leading) {
+ Text("""
+ Enter one of the URLs into the WHIP publisher device to send video \
+ to this stream. Usually enter the WiFi or Personal Hotspot URL.
+ """)
+ }
+ }
+ }
+ .navigationTitle("Stream")
+ } label: {
+ HStack {
+ if model.isWhipStreamConnected(streamKey: stream.streamKey) {
+ Image(systemName: "cable.connector")
+ } else {
+ Image(systemName: "cable.connector.slash")
+ }
+ Text(stream.name)
+ Spacer()
+ }
+ }
+ }
+}
+
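`formatUrl` above pins down the shape of the publish URLs: plain HTTP against one of the phone's interface addresses, the configured server port, then `/whip/` and the stream key. A worked example with placeholder values (the real addresses come from `StatusOther`):

```swift
// Placeholder values for illustration only.
let port: UInt16 = 7000
let streamKey = "my-stream-key"
let wifiIp = "192.168.1.50"
let publishUrl = "http://\(wifiIp):\(port)/whip/\(streamKey)"
// publishUrl == "http://192.168.1.50:7000/whip/my-stream-key"
```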
diff --git a/Vendor/HaishinKit.swift/.github/ISSUE_TEMPLATE/bug_report.yml b/Vendor/HaishinKit.swift/.github/ISSUE_TEMPLATE/bug_report.yml
new file mode 100644
index 000000000..e5df7ee13
--- /dev/null
+++ b/Vendor/HaishinKit.swift/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -0,0 +1,70 @@
+name: Bug Report
+description: |
+ Before posting, confirm whether the issue can be reproduced in the Example project as well.
+body:
+ - type: markdown
+ attributes:
+ value: |
+ * Please review the [support guidelines](https://github.com/HaishinKit/.github/blob/main/SUPPORT.md) first. If the report lacks sufficient details, it will be closed.
+ * Development for version 1.x.x has ended. Issues are no longer accepted.
+ - type: textarea
+ id: observed-behavior
+ attributes:
+ label: Observed behavior
+ description: Explain what happened.
+ validations:
+ required: true
+ - type: textarea
+ id: expected-behavior
+ attributes:
+ label: Expected behavior
+ description: Explain what you expect.
+ validations:
+ required: true
+ - type: textarea
+ id: to-reproduce
+ attributes:
+ label: To Reproduce
+ description: Steps to reproduce the behavior
+ placeholder: |
+ 1. Go to '...'
+ 2. Click on '....'
+ 3. Scroll down to '....'
+ 4. See error
+ validations:
+ required: true
+ - type: textarea
+ id: version
+ attributes:
+ label: Version
+ description: What version of our software are you running?
+ validations:
+ required: true
+ - type: textarea
+ id: smartphone-info
+ attributes:
+ label: Smartphone info.
+ description: Please complete the following information.
+ placeholder: |
+ - Device: [e.g. iPhone 15 Pro]
+ - OS: [e.g. iOS 18.1]
+ - type: textarea
+ id: additional-context
+ attributes:
+ label: Additional context
+ description: Add any other context about the problem here. If you encounter issues while using HaishinKit for streaming or viewing, please provide the server name as well as the name of the software you are using for streaming or viewing.
+ placeholder: |
+ - Server: [e.g. nginx + rtmp module, YouTube]
+ - Ingest software: [e.g. OBS RTMP H264(main) AAC]
+ - Playback software: [e.g. iOS AVAudioPlayer(via HLS)]
+ - type: textarea
+ id: screenshot
+ attributes:
+ label: Screenshots
+ description: If applicable, add screenshots to help explain your problem.
+ - type: textarea
+ id: logs
+ attributes:
+ label: Relevant log output
+ description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
+ render: shell
diff --git a/Vendor/HaishinKit.swift/.github/ISSUE_TEMPLATE/config.yml b/Vendor/HaishinKit.swift/.github/ISSUE_TEMPLATE/config.yml
new file mode 100644
index 000000000..768b98192
--- /dev/null
+++ b/Vendor/HaishinKit.swift/.github/ISSUE_TEMPLATE/config.yml
@@ -0,0 +1,8 @@
+blank_issues_enabled: false
+contact_links:
+ - name: Question
+ url: https://github.com/HaishinKit/HaishinKit.swift/discussions
+ about: Please ask and answer questions here with *Q&A*
+ - name: Feature request
+ url: https://github.com/HaishinKit/HaishinKit.swift/discussions
+ about: Please discuss feature requests here with *Idea*.
diff --git a/Vendor/HaishinKit.swift/.github/dependabot.yml b/Vendor/HaishinKit.swift/.github/dependabot.yml
new file mode 100644
index 000000000..cad58f637
--- /dev/null
+++ b/Vendor/HaishinKit.swift/.github/dependabot.yml
@@ -0,0 +1,13 @@
+# To get started with Dependabot version updates, you'll need to specify which
+# package ecosystems to update and where the package manifests are located.
+# Please see the documentation for all configuration options:
+# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
+version: 2
+updates:
+ - package-ecosystem: "bundler" # See documentation for possible values
+ directory: "/" # Location of package manifests
+ schedule:
+ interval: "weekly"
+ day: "sunday"
+ time: "09:00"
+ timezone: "Asia/Tokyo"
diff --git a/Vendor/HaishinKit.swift/.github/workflows/ai-moderator.yml b/Vendor/HaishinKit.swift/.github/workflows/ai-moderator.yml
new file mode 100644
index 000000000..4c6826361
--- /dev/null
+++ b/Vendor/HaishinKit.swift/.github/workflows/ai-moderator.yml
@@ -0,0 +1,30 @@
+name: AI Moderator
+on:
+ issues:
+ types: [opened]
+ issue_comment:
+ types: [created]
+ pull_request_review_comment:
+ types: [created]
+
+jobs:
+ spam-detection:
+ runs-on: ubuntu-latest
+ permissions:
+ issues: write
+ pull-requests: write
+ models: read
+ contents: read
+ steps:
+ - uses: actions/checkout@v4
+ - uses: github/ai-moderator@v1
+ with:
+ token: ${{ secrets.GITHUB_TOKEN }}
+ spam-label: 'spam'
+ ai-label: 'ai-generated'
+ minimize-detected-comments: true
+ # Built-in prompt configuration (all enabled by default)
+ enable-spam-detection: true
+ enable-link-spam-detection: true
+ # enable-ai-detection: true
+ # custom-prompt-path: '.github/prompts/my-custom.prompt.yml' # Optional
diff --git a/Vendor/HaishinKit.swift/.github/workflows/lock-thread.yml b/Vendor/HaishinKit.swift/.github/workflows/lock-thread.yml
new file mode 100644
index 000000000..acd432f98
--- /dev/null
+++ b/Vendor/HaishinKit.swift/.github/workflows/lock-thread.yml
@@ -0,0 +1,17 @@
+name: 'Lock Threads'
+on:
+ schedule:
+ - cron: '0 1 * * 1,4'
+ workflow_dispatch:
+permissions:
+ issues: write
+concurrency:
+ group: lock
+jobs:
+ action:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: dessant/lock-threads@v4
+ with:
+ issue-inactive-days: '60'
+ process-only: 'issues'
diff --git a/Vendor/HaishinKit.swift/.github/workflows/release.yml b/Vendor/HaishinKit.swift/.github/workflows/release.yml
new file mode 100644
index 000000000..190200cbf
--- /dev/null
+++ b/Vendor/HaishinKit.swift/.github/workflows/release.yml
@@ -0,0 +1,24 @@
+name: Release
+on:
+ push:
+ tags:
+ - '*.*.*'
+jobs:
+ build:
+ runs-on: macos-15
+ steps:
+ - uses: actions/checkout@v4
+ - name: Select Xcode version
+ run: sudo xcode-select -s '/Applications/Xcode_26.0.app'
+ - name: Setup SSH
+ run: |
+ mkdir -p ~/.ssh
+ echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/id_rsa
+ chmod 600 ~/.ssh/id_rsa
+ ssh-keyscan github.com >> ~/.ssh/known_hosts
+ - name: bundle exec fastlane document
+ env:
+ DANGER_GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ run: |
+ bundle install
+ bundle exec fastlane document version:${{ github.ref_name }}
diff --git a/Vendor/HaishinKit.swift/.github/workflows/review.yml b/Vendor/HaishinKit.swift/.github/workflows/review.yml
new file mode 100644
index 000000000..d1bda2d70
--- /dev/null
+++ b/Vendor/HaishinKit.swift/.github/workflows/review.yml
@@ -0,0 +1,15 @@
+name: Review
+on: [pull_request]
+jobs:
+ build:
+ runs-on: macos-26
+ steps:
+ - uses: actions/checkout@v4
+ - name: Select Xcode version
+ run: sudo xcode-select -s '/Applications/Xcode_26.2.0.app'
+ - name: bundle install
+ env:
+ DANGER_GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ run: |
+ bundle install
+ bundle exec fastlane review
diff --git a/Vendor/HaishinKit.swift/.gitignore b/Vendor/HaishinKit.swift/.gitignore
new file mode 100644
index 000000000..aa0fdf193
--- /dev/null
+++ b/Vendor/HaishinKit.swift/.gitignore
@@ -0,0 +1,33 @@
+*.xcodeproj/xcuserdata/*
+*.xcworkspace/xcuserdata/*
+build
+.build
+*.xccheckout
+*.moved-aside
+*.xcuserstate
+*.xcscmblueprint
+.DS_Store
+Pods
+
+soe/bat/*
+soe/obj/*
+soe/bin/*
+soe/air/*
+soe/AIR_readme.txt
+
+Carthage/*
+
+.idea/
+.vscode/
+.swiftpm/
+
+*.xcsettings
+docs/undocumented.json
+fastlane/report.xml
+libdatachannel.xcframework
+docs/docsets/HaishinKit.docset/Contents/Resources/Documents/undocumented.json
+HaishinKit.json
+SRTHaishinKit.json
+SRTHaishinKit/Vendor/SRT/OpenSSL/
+SRTHaishinKit/Vendor/SRT/srt/
+SRTHaishinKit/Vendor/SRT/Includes/
diff --git a/Vendor/HaishinKit.swift/.ruby-version b/Vendor/HaishinKit.swift/.ruby-version
new file mode 100644
index 000000000..f9892605c
--- /dev/null
+++ b/Vendor/HaishinKit.swift/.ruby-version
@@ -0,0 +1 @@
+3.4.4
diff --git a/Vendor/HaishinKit.swift/.swiftlint.yml b/Vendor/HaishinKit.swift/.swiftlint.yml
new file mode 100644
index 000000000..601e04651
--- /dev/null
+++ b/Vendor/HaishinKit.swift/.swiftlint.yml
@@ -0,0 +1,89 @@
+disabled_rules:
+ - identifier_name
+ - force_cast
+ - line_length
+ - type_body_length
+ - function_body_length
+ - file_length
+ - cyclomatic_complexity
+ - compiler_protocol_init
+analyzer_rules:
+ - unused_declaration
+ - unused_import
+excluded:
+ - Carthage
+ - Pods
+ - Vendor
+ - .build
+opt_in_rules:
+# - anyobject_protocol
+ - array_init
+# - attributes
+# - closure_body_length
+ - closure_end_indentation
+ - closure_spacing
+ - collection_alignment
+ - conditional_returns_on_newline
+ - contains_over_first_not_nil
+ - convenience_type
+# - discouraged_object_literal
+ - discouraged_optional_boolean
+# - discouraged_optional_collection
+ - empty_count
+ - empty_string
+# - explicit_acl
+# - explicit_enum_raw_value
+# - explicit_init
+# - explicit_self
+# - explicit_type_interface
+# - fallthrough
+# - file_header
+# - file_name
+ - first_where
+# - force_unwrapping
+ - function_default_parameter_at_end
+ - identical_operands
+# - implicit_return
+ - joined_default_parameter
+ - last_where
+ - legacy_random
+ - let_var_whitespace
+ - literal_expression_end_indentation
+# - lower_acl_than_parent
+# - missing_docs
+ - modifier_order
+ - multiline_arguments
+ - multiline_function_chains
+ - multiline_literal_brackets
+ - multiline_parameters
+# - no_extension_access_modifier
+# - no_grouping_extension
+# - number_separator
+ - operator_usage_whitespace
+ - overridden_super_call
+ - override_in_extension
+ - pattern_matching_keywords
+# - prefixed_toplevel_constant
+ - private_outlet
+ - private_over_fileprivate
+# - prohibited_interface_builder
+ - quick_discouraged_call
+ - redundant_nil_coalescing
+ - redundant_type_annotation
+ - required_enum_case
+ - shorthand_operator
+ - sorted_first_last
+ - sorted_imports
+ - static_operator
+# - strict_fileprivate
+ - switch_case_on_newline
+ - toggle_bool
+# - trailing_closure
+ - unavailable_function
+ - unneeded_parentheses_in_closure_argument
+ - untyped_error_in_catch
+ - vertical_parameter_alignment_on_call
+# - vertical_whitespace_between_cases
+ - vertical_whitespace_closing_braces
+ - vertical_whitespace_opening_braces
+# - yoda_condition
diff --git a/Vendor/HaishinKit.swift/Dangerfile b/Vendor/HaishinKit.swift/Dangerfile
new file mode 100644
index 000000000..e78afff59
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Dangerfile
@@ -0,0 +1,3 @@
+github.dismiss_out_of_range_messages
+
+privacymanifest.report
diff --git a/Vendor/HaishinKit.swift/DebugDescription/AVAudioFormat+DebugExtension.swift b/Vendor/HaishinKit.swift/DebugDescription/AVAudioFormat+DebugExtension.swift
new file mode 100644
index 000000000..7eff82bfb
--- /dev/null
+++ b/Vendor/HaishinKit.swift/DebugDescription/AVAudioFormat+DebugExtension.swift
@@ -0,0 +1,109 @@
+import AVFoundation
+
+#if DEBUG
+extension AVAudioCommonFormat: CustomDebugStringConvertible {
+ public var debugDescription: String {
+ switch self {
+ case .pcmFormatFloat32:
+ return "float32"
+ case .pcmFormatFloat64:
+ return "float64"
+ case .pcmFormatInt16:
+ return "int16"
+ case .pcmFormatInt32:
+ return "int32"
+ case .otherFormat:
+ return "other"
+ @unknown default:
+ return "unknown"
+ }
+ }
+}
+
+extension AudioFormatID: CustomDebugStringConvertible {
+ public var debugDescription: String {
+ switch self {
+ case kAudioFormatAC3:
+ return "kAudioFormatAC3"
+ case kAudioFormatAES3:
+ return "kAudioFormatAES3"
+ case kAudioFormatALaw:
+ return "kAudioFormatALaw"
+ case kAudioFormatAMR:
+ return "kAudioFormatAMR"
+ case kAudioFormatAMR_WB:
+ return "kAudioFormatAMR_WB"
+ case kAudioFormatAppleIMA4:
+ return "kAudioFormatAppleIMA4"
+ case kAudioFormatAppleLossless:
+ return "kAudioFormatAppleLossless"
+ case kAudioFormatAudible:
+ return "kAudioFormatAudible"
+ case kAudioFormatDVIIntelIMA:
+ return "kAudioFormatDVIIntelIMA"
+ case kAudioFormatEnhancedAC3:
+ return "kAudioFormatEnhancedAC3"
+ case kAudioFormatFLAC:
+ return "kAudioFormatFLAC"
+ case kAudioFormatLinearPCM:
+ return "kAudioFormatLinearPCM"
+ case kAudioFormatMACE3:
+ return "kAudioFormatMACE3"
+ case kAudioFormatMACE6:
+ return "kAudioFormatMACE6"
+ case kAudioFormatMIDIStream:
+ return "kAudioFormatMIDIStream"
+ case kAudioFormatMPEG4AAC:
+ return "kAudioFormatMPEG4AAC"
+ case kAudioFormatMPEG4AAC_ELD:
+ return "kAudioFormatMPEG4AAC_ELD"
+ case kAudioFormatMPEG4AAC_ELD_SBR:
+ return "kAudioFormatMPEG4AAC_ELD_SBR"
+ case kAudioFormatMPEG4AAC_ELD_V2:
+ return "kAudioFormatMPEG4AAC_ELD_V2"
+ case kAudioFormatMPEG4AAC_HE:
+ return "kAudioFormatMPEG4AAC_HE"
+ case kAudioFormatMPEG4AAC_HE_V2:
+ return "kAudioFormatMPEG4AAC_HE_V2"
+ case kAudioFormatMPEG4AAC_LD:
+ return "kAudioFormatMPEG4AAC_LD"
+ case kAudioFormatMPEG4AAC_Spatial:
+ return "kAudioFormatMPEG4AAC_Spatial"
+ case kAudioFormatMPEG4CELP:
+ return "kAudioFormatMPEG4CELP"
+ case kAudioFormatMPEG4HVXC:
+ return "kAudioFormatMPEG4HVXC"
+ case kAudioFormatMPEG4TwinVQ:
+ return "kAudioFormatMPEG4TwinVQ"
+ case kAudioFormatMPEGD_USAC:
+ return "kAudioFormatMPEGD_USAC"
+ case kAudioFormatMPEGLayer1:
+ return "kAudioFormatMPEGLayer1"
+ case kAudioFormatMPEGLayer2:
+ return "kAudioFormatMPEGLayer2"
+ case kAudioFormatMPEGLayer3:
+ return "kAudioFormatMPEGLayer3"
+ case kAudioFormatMicrosoftGSM:
+ return "kAudioFormatMicrosoftGSM"
+ case kAudioFormatOpus:
+ return "kAudioFormatOpus"
+ case kAudioFormatParameterValueStream:
+ return "kAudioFormatParameterValueStream"
+ case kAudioFormatQDesign:
+ return "kAudioFormatQDesign"
+ case kAudioFormatQDesign2:
+ return "kAudioFormatQDesign2"
+ case kAudioFormatQUALCOMM:
+ return "kAudioFormatQUALCOMM"
+ case kAudioFormatTimeCode:
+ return "kAudioFormatTimeCode"
+ case kAudioFormatULaw:
+ return "kAudioFormatULaw"
+ case kAudioFormatiLBC:
+ return "kAudioFormatiLBC"
+ default:
+ return "unknown"
+ }
+ }
+}
+#endif
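Usage note for the DEBUG-only extension above: the payoff is that format IDs log by name rather than as raw FourCC integers.

```swift
import AVFoundation

#if DEBUG
// With the debugDescription extension above in scope:
let formatId: AudioFormatID = kAudioFormatOpus
print(formatId.debugDescription) // prints "kAudioFormatOpus"
#endif
```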
diff --git a/Vendor/HaishinKit.swift/DebugDescription/AudioNode+DebugExtension.swift b/Vendor/HaishinKit.swift/DebugDescription/AudioNode+DebugExtension.swift
new file mode 100644
index 000000000..ad76297fc
--- /dev/null
+++ b/Vendor/HaishinKit.swift/DebugDescription/AudioNode+DebugExtension.swift
@@ -0,0 +1,182 @@
+import AVFoundation
+import Foundation
+
+extension AudioNode: CustomStringConvertible {
+ var description: String {
+ var description: [String] = []
+
+ for scope in BusScope.allCases {
+ guard let busCount = try? busCount(scope: scope) else {
+ description.append("failed to get \(scope.rawValue) bus count")
+ continue
+ }
+ guard busCount > 0 else {
+ continue
+ }
+ var busDescription: [String] = []
+ for busIndex in 0..<busCount {
+ let parameterIds = UnsafeMutablePointer<AudioUnitParameterID>.allocate(capacity: numberOfParameters)
+ defer { parameterIds.deallocate() }
+
+ if numberOfParameters > 0 {
+ status = AudioUnitGetProperty(audioUnit,
+ kAudioUnitProperty_ParameterList,
+ kAudioUnitScope_Global,
+ 0,
+ parameterIds,
+ &parameterListSize)
+ guard status == noErr else {
+ throw Error.unableToRetrieveValue(status)
+ }
+ }
+
+ var info = AudioUnitParameterInfo()
+ var infoSize = UInt32(MemoryLayout<AudioUnitParameterInfo>.size)
+
+ static func flags(from flagOptionSet: AudioFormatFlags) -> Set<ReadableFormatFlag> {
+ var result = Set<ReadableFormatFlag>()
+ allCases.forEach { flag in
+ if flag.flagValue & flagOptionSet == flag.flagValue {
+ result.insert(flag)
+ }
+ }
+ return result
+ }
+
+ static func flagOptionSet(from flagSet: Set<ReadableFormatFlag>) -> AudioFormatFlags {
+ var optionSet: AudioFormatFlags = 0
+ flagSet.forEach { flag in
+ optionSet |= flag.flagValue
+ }
+ return optionSet
+ }
+
+ public var description: String {
+ rawValue
+ }
+ }
+
+ struct ReadableFlagOptionSet: OptionSet, CustomStringConvertible {
+ public let rawValue: AudioFormatFlags
+ public let flags: Set<ReadableFormatFlag>
+
+ public init(rawValue value: AudioFormatFlags) {
+ self.rawValue = value
+ flags = ReadableFormatFlag.flags(from: rawValue)
+ }
+
+ public var description: String {
+ guard ReadableFormatFlag.flagOptionSet(from: flags) == rawValue else {
+ return "Unable to parse AudioFormatFlags"
+ }
+ let result = flags.sorted(by: { $0.rawValue < $1.rawValue }).map { $0.description }.joined(separator: " | ")
+ return "AudioFormatFlags(\(result))"
+ }
+ }
+
+ var readableFormatID: String {
+ switch mFormatID {
+ // swiftlint:disable switch_case_on_newline
+ case kAudioFormatLinearPCM: return "LinearPCM"
+ case kAudioFormatAC3: return "AC3"
+ case kAudioFormat60958AC3: return "60958AC3"
+ case kAudioFormatAppleIMA4: return "AppleIMA4"
+ case kAudioFormatMPEG4AAC: return "MPEG4AAC"
+ case kAudioFormatMPEG4CELP: return "MPEG4CELP"
+ case kAudioFormatMPEG4HVXC: return "MPEG4HVXC"
+ case kAudioFormatMPEG4TwinVQ: return "MPEG4TwinVQ"
+ case kAudioFormatMACE3: return "MACE3"
+ case kAudioFormatMACE6: return "MACE6"
+ case kAudioFormatULaw: return "ULaw"
+ case kAudioFormatALaw: return "ALaw"
+ case kAudioFormatQDesign: return "QDesign"
+ case kAudioFormatQDesign2: return "QDesign2"
+ case kAudioFormatQUALCOMM: return "QUALCOMM"
+ case kAudioFormatMPEGLayer1: return "MPEGLayer1"
+ case kAudioFormatMPEGLayer2: return "MPEGLayer2"
+ case kAudioFormatMPEGLayer3: return "MPEGLayer3"
+ case kAudioFormatTimeCode: return "TimeCode"
+ case kAudioFormatMIDIStream: return "MIDIStream"
+ case kAudioFormatParameterValueStream: return "ParameterValueStream"
+ case kAudioFormatAppleLossless: return "AppleLossless"
+ case kAudioFormatMPEG4AAC_HE: return "MPEG4AAC_HE"
+ case kAudioFormatMPEG4AAC_LD: return "MPEG4AAC_LD"
+ case kAudioFormatMPEG4AAC_ELD: return "MPEG4AAC_ELD"
+ case kAudioFormatMPEG4AAC_ELD_SBR: return "MPEG4AAC_ELD_SBR"
+ case kAudioFormatMPEG4AAC_ELD_V2: return "MPEG4AAC_ELD_V2"
+ case kAudioFormatMPEG4AAC_HE_V2: return "MPEG4AAC_HE_V2"
+ case kAudioFormatMPEG4AAC_Spatial: return "MPEG4AAC_Spatial"
+ case kAudioFormatAMR: return "AMR"
+ case kAudioFormatAMR_WB: return "AMR_WB"
+ case kAudioFormatAudible: return "Audible"
+ case kAudioFormatiLBC: return "iLBC"
+ case kAudioFormatDVIIntelIMA: return "DVIIntelIMA"
+ case kAudioFormatMicrosoftGSM: return "MicrosoftGSM"
+ case kAudioFormatAES3: return "AES3"
+ case kAudioFormatEnhancedAC3: return "EnhancedAC3"
+ default: return "unknown_(\(Int(mFormatID)))"
+ // swiftlint:enable switch_case_on_newline
+ }
+ }
+
+ var readableFlags: ReadableFlagOptionSet {
+ ReadableFlagOptionSet(rawValue: mFormatFlags)
+ }
+}
+
+extension AudioStreamBasicDescription: CustomDebugStringConvertible {
+ // MARK: CustomDebugStringConvertible
+ public var debugDescription: String {
+ "AudioStreamBasicDescription(mSampleRate: \(mSampleRate), mFormatID: \(mFormatID) \(readableFormatID), "
+ + "mFormatFlags: \(mFormatFlags) \(readableFlags), mBytesPerPacket: \(mBytesPerPacket), "
+ + "mFramesPerPacket: \(mFramesPerPacket), mBytesPerFrame: \(mBytesPerFrame), "
+ + "mChannelsPerFrame: \(mChannelsPerFrame), mBitsPerChannel: \(mBitsPerChannel), mReserved: \(mReserved)"
+ }
+}
+
+#endif
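And a usage note for the `AudioStreamBasicDescription` extension above, using a 48 kHz stereo float32 PCM description; the exact flag names in the output depend on the `ReadableFormatFlag` cases enumerated earlier in the file.

```swift
import AVFoundation

#if DEBUG
var asbd = AudioStreamBasicDescription(
    mSampleRate: 48000,
    mFormatID: kAudioFormatLinearPCM,
    mFormatFlags: kAudioFormatFlagIsFloat | kAudioFormatFlagIsPacked,
    mBytesPerPacket: 8,
    mFramesPerPacket: 1,
    mBytesPerFrame: 8,
    mChannelsPerFrame: 2,
    mBitsPerChannel: 32,
    mReserved: 0
)
// Prints a readable summary, e.g. "AudioStreamBasicDescription(mSampleRate: 48000.0,
// mFormatID: 1819304813 LinearPCM, mFormatFlags: ...)".
print(asbd.debugDescription)
#endif
```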
diff --git a/Vendor/HaishinKit.swift/Examples/.swiftlint.yml b/Vendor/HaishinKit.swift/Examples/.swiftlint.yml
new file mode 100644
index 000000000..601e04651
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/.swiftlint.yml
@@ -0,0 +1,89 @@
+disabled_rules:
+ - identifier_name
+ - force_cast
+ - line_length
+ - type_body_length
+ - function_body_length
+ - file_length
+ - cyclomatic_complexity
+ - compiler_protocol_init
+analyzer_rules:
+ - unused_declaration
+ - unused_import
+excluded:
+ - Carthage
+ - Pods
+ - Vendor
+ - .build
+opt_in_rules:
+# - anyobject_protocol
+ - array_init
+# - attributes
+# - closure_body_length
+ - closure_end_indentation
+ - closure_spacing
+ - collection_alignment
+ - conditional_returns_on_newline
+ - contains_over_first_not_nil
+ - convenience_type
+# - discouraged_object_literal
+ - discouraged_optional_boolean
+# - discouraged_optional_collection
+ - empty_count
+ - empty_string
+# - explicit_acl
+# - explicit_enum_raw_value
+# - explicit_init
+# - explicit_self
+# - explicit_type_interface
+# - fallthrough
+# - file_header
+# - file_name
+ - first_where
+# - force_unwrapping
+ - function_default_parameter_at_end
+ - identical_operands
+# - implicit_return
+ - joined_default_parameter
+ - last_where
+ - legacy_random
+ - let_var_whitespace
+ - literal_expression_end_indentation
+# - lower_acl_than_parent
+# - missing_docs
+ - modifier_order
+ - multiline_arguments
+ - multiline_function_chains
+ - multiline_literal_brackets
+ - multiline_parameters
+# - no_extension_access_modifier
+# - no_grouping_extension
+# - number_separator
+ - operator_usage_whitespace
+ - overridden_super_call
+ - override_in_extension
+ - pattern_matching_keywords
+# - prefixed_toplevel_constant
+ - private_outlet
+ - private_over_fileprivate
+# - prohibited_interface_builder
+ - quick_discouraged_call
+ - redundant_nil_coalescing
+ - redundant_type_annotation
+ - required_enum_case
+ - shorthand_operator
+ - sorted_first_last
+ - sorted_imports
+ - static_operator
+# - strict_fileprivate
+ - switch_case_on_newline
+ - toggle_bool
+# - trailing_closure
+ - unavailable_function
+ - unneeded_parentheses_in_closure_argument
+ - untyped_error_in_catch
+ - vertical_parameter_alignment_on_call
+# - vertical_whitespace_between_cases
+ - vertical_whitespace_closing_braces
+ - vertical_whitespace_opening_braces
+# - yoda_condition
diff --git a/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/project.pbxproj b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/project.pbxproj
new file mode 100644
index 000000000..091cdc0c7
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/project.pbxproj
@@ -0,0 +1,1287 @@
+// !$*UTF8*$!
+{
+ archiveVersion = 1;
+ classes = {
+ };
+ objectVersion = 70;
+ objects = {
+
+/* Begin PBXBuildFile section */
+ BC05F96E2E3D248A002BCC92 /* Preference.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC9129382D12EA820077EFF3 /* Preference.swift */; };
+ BC4B7DCD2E86D06700973BD7 /* ReplayKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = BCD5C7F92E3F189700A69F16 /* ReplayKit.framework */; };
+ BC4B7DD42E86D06700973BD7 /* Screencast.appex in Embed Foundation Extensions */ = {isa = PBXBuildFile; fileRef = BC4B7DCC2E86D06700973BD7 /* Screencast.appex */; settings = {ATTRIBUTES = (RemoveHeadersOnCopy, ); }; };
+ BC4B7DE02E86D13C00973BD7 /* HaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BC4B7DDF2E86D13C00973BD7 /* HaishinKit */; };
+ BC4B7DE22E86D13C00973BD7 /* MoQTHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BC4B7DE12E86D13C00973BD7 /* MoQTHaishinKit */; };
+ BC4B7DE42E86D13C00973BD7 /* RTCHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BC4B7DE32E86D13C00973BD7 /* RTCHaishinKit */; };
+ BC4B7DE62E86D13C00973BD7 /* RTMPHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BC4B7DE52E86D13C00973BD7 /* RTMPHaishinKit */; };
+ BC4B7DE82E86D13C00973BD7 /* SRTHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BC4B7DE72E86D13C00973BD7 /* SRTHaishinKit */; };
+ BC4B7DE92E86D14A00973BD7 /* Preference.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC9129382D12EA820077EFF3 /* Preference.swift */; };
+ BC6842B62E3D1294008FBFEE /* HaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BC6842B52E3D1294008FBFEE /* HaishinKit */; };
+ BC6842B82E3D1294008FBFEE /* MoQTHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BC6842B72E3D1294008FBFEE /* MoQTHaishinKit */; };
+ BC6842BA2E3D1294008FBFEE /* RTMPHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BC6842B92E3D1294008FBFEE /* RTMPHaishinKit */; };
+ BC6842BC2E3D1294008FBFEE /* SRTHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BC6842BB2E3D1294008FBFEE /* SRTHaishinKit */; };
+ BC82125D2EB8FB1500419D06 /* HaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BC82125C2EB8FB1500419D06 /* HaishinKit */; };
+ BC82125F2EB8FB1C00419D06 /* RTCHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BC82125E2EB8FB1C00419D06 /* RTCHaishinKit */; };
+ BC8212612EB8FB2000419D06 /* RTMPHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BC8212602EB8FB2000419D06 /* RTMPHaishinKit */; };
+ BC8212632EB8FB2400419D06 /* SRTHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BC8212622EB8FB2400419D06 /* SRTHaishinKit */; };
+ BC8212652EB8FB2D00419D06 /* MoQTHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BC8212642EB8FB2D00419D06 /* MoQTHaishinKit */; };
+ BC8212662EB8FB5200419D06 /* Preference.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC9129382D12EA820077EFF3 /* Preference.swift */; };
+ BC91298C2D12EB5E0077EFF3 /* Preference.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC9129382D12EA820077EFF3 /* Preference.swift */; };
+ BC912A812D12F10C0077EFF3 /* libc++.1.tbd in Frameworks */ = {isa = PBXBuildFile; fileRef = BC912A802D12F0900077EFF3 /* libc++.1.tbd */; };
+ BCD2478C2E54C04E00C64280 /* RTCHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BCD2478B2E54C04E00C64280 /* RTCHaishinKit */; };
+ BCD917072D3A94BC00D30743 /* HaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BCD917062D3A94BC00D30743 /* HaishinKit */; };
+ BCD917092D3A94BC00D30743 /* MoQTHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BCD917082D3A94BC00D30743 /* MoQTHaishinKit */; };
+ BCD9170B2D3A94BC00D30743 /* SRTHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BCD9170A2D3A94BC00D30743 /* SRTHaishinKit */; };
+ BCEC2BC02E104D9A00422F8F /* RTMPHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BCEC2BBF2E104D9A00422F8F /* RTMPHaishinKit */; };
+ BCFE628F2E7710A100941209 /* Preference.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC9129382D12EA820077EFF3 /* Preference.swift */; };
+ BCFE62912E7710D800941209 /* HaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BCFE62902E7710D800941209 /* HaishinKit */; };
+ BCFE62A82E77179F00941209 /* RTCHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BCFE62A72E77179F00941209 /* RTCHaishinKit */; };
+ BCFE62AA2E77179F00941209 /* RTMPHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BCFE62A92E77179F00941209 /* RTMPHaishinKit */; };
+ BCFE62AC2E77179F00941209 /* SRTHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BCFE62AB2E77179F00941209 /* SRTHaishinKit */; };
+/* End PBXBuildFile section */
+
+/* Begin PBXContainerItemProxy section */
+ BC4B7DD22E86D06700973BD7 /* PBXContainerItemProxy */ = {
+ isa = PBXContainerItemProxy;
+ containerPortal = 2945CBB41B4BE66000104112 /* Project object */;
+ proxyType = 1;
+ remoteGlobalIDString = BC4B7DCB2E86D06700973BD7;
+ remoteInfo = Screencast;
+ };
+/* End PBXContainerItemProxy section */
+
+/* Begin PBXCopyFilesBuildPhase section */
+ BC7A0E592B0894B9005FB2F7 /* Embed Frameworks */ = {
+ isa = PBXCopyFilesBuildPhase;
+ buildActionMask = 2147483647;
+ dstPath = "";
+ dstSubfolderSpec = 10;
+ files = (
+ );
+ name = "Embed Frameworks";
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ BCD5C8062E3F189700A69F16 /* Embed Foundation Extensions */ = {
+ isa = PBXCopyFilesBuildPhase;
+ buildActionMask = 2147483647;
+ dstPath = "";
+ dstSubfolderSpec = 13;
+ files = (
+ BC4B7DD42E86D06700973BD7 /* Screencast.appex in Embed Foundation Extensions */,
+ );
+ name = "Embed Foundation Extensions";
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXCopyFilesBuildPhase section */
+
+/* Begin PBXFileReference section */
+ BC4B7DCC2E86D06700973BD7 /* Screencast.appex */ = {isa = PBXFileReference; explicitFileType = "wrapper.app-extension"; includeInIndex = 0; path = Screencast.appex; sourceTree = BUILT_PRODUCTS_DIR; };
+ BC6841622E3CFBE0008FBFEE /* Example iOS.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "Example iOS.app"; sourceTree = BUILT_PRODUCTS_DIR; };
+ BC7A0E3D2B088FA7005FB2F7 /* Example visionOS.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "Example visionOS.app"; sourceTree = BUILT_PRODUCTS_DIR; };
+ BC82124C2EB8F8BE00419D06 /* Example tvOS.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "Example tvOS.app"; sourceTree = BUILT_PRODUCTS_DIR; };
+ BC91287D2D12E7540077EFF3 /* Examples.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; path = Examples.xcodeproj; sourceTree = "<group>"; };
+ BC9128F82D12E9970077EFF3 /* Examples.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; path = Examples.xcodeproj; sourceTree = "<group>"; };
+ BC9129382D12EA820077EFF3 /* Preference.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Preference.swift; sourceTree = "<group>"; };
+ BC912A7F2D12F0830077EFF3 /* libc++.tbd */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.text-based-dylib-definition"; name = "libc++.tbd"; path = "Platforms/XROS.platform/Developer/SDKs/XROS2.1.sdk/usr/lib/libc++.tbd"; sourceTree = DEVELOPER_DIR; };
+ BC912A802D12F0900077EFF3 /* libc++.1.tbd */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.text-based-dylib-definition"; name = "libc++.1.tbd"; path = "Platforms/XROS.platform/Developer/SDKs/XROS2.1.sdk/usr/lib/libc++.1.tbd"; sourceTree = DEVELOPER_DIR; };
+ BCD5C7F92E3F189700A69F16 /* ReplayKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = ReplayKit.framework; path = System/Library/Frameworks/ReplayKit.framework; sourceTree = SDKROOT; };
+ BCD916432D3A934400D30743 /* HaishinKit */ = {isa = PBXFileReference; lastKnownFileType = wrapper; name = HaishinKit; path = ../; sourceTree = SOURCE_ROOT; };
+ BCFE627E2E770A9900941209 /* Example macOS.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "Example macOS.app"; sourceTree = BUILT_PRODUCTS_DIR; };
+/* End PBXFileReference section */
+
+/* Begin PBXFileSystemSynchronizedBuildFileExceptionSet section */
+ BC36B4F82E77201200AADEC1 /* PBXFileSystemSynchronizedBuildFileExceptionSet */ = {
+ isa = PBXFileSystemSynchronizedBuildFileExceptionSet;
+ membershipExceptions = (
+ PreferenceView.swift,
+ PreferenceViewModel.swift,
+ VisualEffect.swift,
+ );
+ target = BCFE627D2E770A9900941209 /* Example macOS */;
+ };
+ BC4B7DDD2E86D06E00973BD7 /* PBXFileSystemSynchronizedBuildFileExceptionSet */ = {
+ isa = PBXFileSystemSynchronizedBuildFileExceptionSet;
+ membershipExceptions = (
+ Info.plist,
+ Screencast/Info.plist,
+ Screencast/SampleHandler.swift,
+ );
+ target = BC6841612E3CFBE0008FBFEE /* Example iOS */;
+ };
+ BC4B7DDE2E86D06E00973BD7 /* PBXFileSystemSynchronizedBuildFileExceptionSet */ = {
+ isa = PBXFileSystemSynchronizedBuildFileExceptionSet;
+ membershipExceptions = (
+ Screencast/SampleHandler.swift,
+ );
+ target = BC4B7DCB2E86D06700973BD7 /* Screencast */;
+ };
+ BC8212582EB8FA8A00419D06 /* PBXFileSystemSynchronizedBuildFileExceptionSet */ = {
+ isa = PBXFileSystemSynchronizedBuildFileExceptionSet;
+ membershipExceptions = (
+ PlaybackView.swift,
+ PlaybackViewModel.swift,
+ PreferenceView.swift,
+ PreferenceViewModel.swift,
+ );
+ target = BC82124B2EB8F8BE00419D06 /* Example tvOS */;
+ };
+ BC912A722D12F0100077EFF3 /* PBXFileSystemSynchronizedBuildFileExceptionSet */ = {
+ isa = PBXFileSystemSynchronizedBuildFileExceptionSet;
+ membershipExceptions = (
+ ContentView.swift,
+ HaishinApp.swift,
+ );
+ target = BC7A0E3C2B088FA7005FB2F7 /* Example visionOS */;
+ };
+ BCABAC0D2E65BBBC00B0E878 /* PBXFileSystemSynchronizedBuildFileExceptionSet */ = {
+ isa = PBXFileSystemSynchronizedBuildFileExceptionSet;
+ membershipExceptions = (
+ PlaybackView.swift,
+ PlaybackViewModel.swift,
+ );
+ target = BC7A0E3C2B088FA7005FB2F7 /* Example visionOS */;
+ };
+/* End PBXFileSystemSynchronizedBuildFileExceptionSet section */
+
+/* Begin PBXFileSystemSynchronizedRootGroup section */
+ BC6841632E3CFBE0008FBFEE /* iOS */ = {isa = PBXFileSystemSynchronizedRootGroup; exceptions = (BC4B7DDD2E86D06E00973BD7 /* PBXFileSystemSynchronizedBuildFileExceptionSet */, BC36B4F82E77201200AADEC1 /* PBXFileSystemSynchronizedBuildFileExceptionSet */, BCABAC0D2E65BBBC00B0E878 /* PBXFileSystemSynchronizedBuildFileExceptionSet */, BC4B7DDE2E86D06E00973BD7 /* PBXFileSystemSynchronizedBuildFileExceptionSet */, BC8212582EB8FA8A00419D06 /* PBXFileSystemSynchronizedBuildFileExceptionSet */, ); explicitFileTypes = {}; explicitFolders = (); path = iOS; sourceTree = "<group>"; };
+ BC82124D2EB8F8BE00419D06 /* tvOS */ = {isa = PBXFileSystemSynchronizedRootGroup; explicitFileTypes = {}; explicitFolders = (); path = tvOS; sourceTree = "<group>"; };
+ BC9129712D12EA930077EFF3 /* visionOS */ = {isa = PBXFileSystemSynchronizedRootGroup; exceptions = (BC912A722D12F0100077EFF3 /* PBXFileSystemSynchronizedBuildFileExceptionSet */, ); explicitFileTypes = {}; explicitFolders = (); path = visionOS; sourceTree = "<group>"; };
+ BCFE627F2E770A9900941209 /* macOS */ = {isa = PBXFileSystemSynchronizedRootGroup; explicitFileTypes = {}; explicitFolders = (); path = macOS; sourceTree = "<group>"; };
+/* End PBXFileSystemSynchronizedRootGroup section */
+
+/* Begin PBXFrameworksBuildPhase section */
+ BC4B7DC92E86D06700973BD7 /* Frameworks */ = {
+ isa = PBXFrameworksBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ BC4B7DE22E86D13C00973BD7 /* MoQTHaishinKit in Frameworks */,
+ BC4B7DE62E86D13C00973BD7 /* RTMPHaishinKit in Frameworks */,
+ BC4B7DE82E86D13C00973BD7 /* SRTHaishinKit in Frameworks */,
+ BC4B7DE02E86D13C00973BD7 /* HaishinKit in Frameworks */,
+ BC4B7DE42E86D13C00973BD7 /* RTCHaishinKit in Frameworks */,
+ BC4B7DCD2E86D06700973BD7 /* ReplayKit.framework in Frameworks */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ BC68415F2E3CFBE0008FBFEE /* Frameworks */ = {
+ isa = PBXFrameworksBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ BC6842B62E3D1294008FBFEE /* HaishinKit in Frameworks */,
+ BCD2478C2E54C04E00C64280 /* RTCHaishinKit in Frameworks */,
+ BC6842B82E3D1294008FBFEE /* MoQTHaishinKit in Frameworks */,
+ BC6842BC2E3D1294008FBFEE /* SRTHaishinKit in Frameworks */,
+ BC6842BA2E3D1294008FBFEE /* RTMPHaishinKit in Frameworks */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ BC7A0E3A2B088FA7005FB2F7 /* Frameworks */ = {
+ isa = PBXFrameworksBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ BCD917092D3A94BC00D30743 /* MoQTHaishinKit in Frameworks */,
+ BCEC2BC02E104D9A00422F8F /* RTMPHaishinKit in Frameworks */,
+ BCD917072D3A94BC00D30743 /* HaishinKit in Frameworks */,
+ BCD9170B2D3A94BC00D30743 /* SRTHaishinKit in Frameworks */,
+ BC912A812D12F10C0077EFF3 /* libc++.1.tbd in Frameworks */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ BC8212492EB8F8BE00419D06 /* Frameworks */ = {
+ isa = PBXFrameworksBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ BC82125F2EB8FB1C00419D06 /* RTCHaishinKit in Frameworks */,
+ BC82125D2EB8FB1500419D06 /* HaishinKit in Frameworks */,
+ BC8212632EB8FB2400419D06 /* SRTHaishinKit in Frameworks */,
+ BC8212652EB8FB2D00419D06 /* MoQTHaishinKit in Frameworks */,
+ BC8212612EB8FB2000419D06 /* RTMPHaishinKit in Frameworks */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ BCFE627B2E770A9900941209 /* Frameworks */ = {
+ isa = PBXFrameworksBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ BCFE62912E7710D800941209 /* HaishinKit in Frameworks */,
+ BCFE62AC2E77179F00941209 /* SRTHaishinKit in Frameworks */,
+ BCFE62AA2E77179F00941209 /* RTMPHaishinKit in Frameworks */,
+ BCFE62A82E77179F00941209 /* RTCHaishinKit in Frameworks */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXFrameworksBuildPhase section */
+
+/* Begin PBXGroup section */
+ 2945CBB31B4BE66000104112 = {
+ isa = PBXGroup;
+ children = (
+ BC9129382D12EA820077EFF3 /* Preference.swift */,
+ BC912A762D12F0600077EFF3 /* Frameworks */,
+ 2945CBBE1B4BE66000104112 /* Products */,
+ BC6841632E3CFBE0008FBFEE /* iOS */,
+ BCFE627F2E770A9900941209 /* macOS */,
+ BC82124D2EB8F8BE00419D06 /* tvOS */,
+ BC9129712D12EA930077EFF3 /* visionOS */,
+ );
+ sourceTree = "";
+ wrapsLines = 0;
+ };
+ 2945CBBE1B4BE66000104112 /* Products */ = {
+ isa = PBXGroup;
+ children = (
+ BC7A0E3D2B088FA7005FB2F7 /* Example visionOS.app */,
+ BC6841622E3CFBE0008FBFEE /* Example iOS.app */,
+ BCFE627E2E770A9900941209 /* Example macOS.app */,
+ BC4B7DCC2E86D06700973BD7 /* Screencast.appex */,
+ BC82124C2EB8F8BE00419D06 /* Example tvOS.app */,
+ );
+ name = Products;
+ sourceTree = "";
+ };
+ BC9128802D12E7540077EFF3 /* Products */ = {
+ isa = PBXGroup;
+ name = Products;
+ sourceTree = "";
+ };
+ BC9128FB2D12E9970077EFF3 /* Products */ = {
+ isa = PBXGroup;
+ name = Products;
+ sourceTree = "";
+ };
+ BC912A762D12F0600077EFF3 /* Frameworks */ = {
+ isa = PBXGroup;
+ children = (
+ BC912A802D12F0900077EFF3 /* libc++.1.tbd */,
+ BC912A7F2D12F0830077EFF3 /* libc++.tbd */,
+ BCD916432D3A934400D30743 /* HaishinKit */,
+ BCD5C7F92E3F189700A69F16 /* ReplayKit.framework */,
+ );
+ name = Frameworks;
+ sourceTree = "";
+ };
+/* End PBXGroup section */
+
+/* Begin PBXNativeTarget section */
+ BC4B7DCB2E86D06700973BD7 /* Screencast */ = {
+ isa = PBXNativeTarget;
+ buildConfigurationList = BC4B7DD62E86D06700973BD7 /* Build configuration list for PBXNativeTarget "Screencast" */;
+ buildPhases = (
+ BC4B7DC82E86D06700973BD7 /* Sources */,
+ BC4B7DC92E86D06700973BD7 /* Frameworks */,
+ BC4B7DCA2E86D06700973BD7 /* Resources */,
+ );
+ buildRules = (
+ );
+ dependencies = (
+ );
+ name = Screencast;
+ packageProductDependencies = (
+ BC4B7DDF2E86D13C00973BD7 /* HaishinKit */,
+ BC4B7DE12E86D13C00973BD7 /* MoQTHaishinKit */,
+ BC4B7DE32E86D13C00973BD7 /* RTCHaishinKit */,
+ BC4B7DE52E86D13C00973BD7 /* RTMPHaishinKit */,
+ BC4B7DE72E86D13C00973BD7 /* SRTHaishinKit */,
+ );
+ productName = Screencast;
+ productReference = BC4B7DCC2E86D06700973BD7 /* Screencast.appex */;
+ productType = "com.apple.product-type.app-extension";
+ };
+ BC6841612E3CFBE0008FBFEE /* Example iOS */ = {
+ isa = PBXNativeTarget;
+ buildConfigurationList = BC68416B2E3CFBE2008FBFEE /* Build configuration list for PBXNativeTarget "Example iOS" */;
+ buildPhases = (
+ BC68415E2E3CFBE0008FBFEE /* Sources */,
+ BC68415F2E3CFBE0008FBFEE /* Frameworks */,
+ BC6841602E3CFBE0008FBFEE /* Resources */,
+ BCD5C8062E3F189700A69F16 /* Embed Foundation Extensions */,
+ );
+ buildRules = (
+ );
+ dependencies = (
+ BC4B7DD32E86D06700973BD7 /* PBXTargetDependency */,
+ );
+ fileSystemSynchronizedGroups = (
+ BC6841632E3CFBE0008FBFEE /* iOS */,
+ );
+ name = "Example iOS";
+ packageProductDependencies = (
+ BC6842B52E3D1294008FBFEE /* HaishinKit */,
+ BC6842B72E3D1294008FBFEE /* MoQTHaishinKit */,
+ BC6842B92E3D1294008FBFEE /* RTMPHaishinKit */,
+ BC6842BB2E3D1294008FBFEE /* SRTHaishinKit */,
+ BCD2478B2E54C04E00C64280 /* RTCHaishinKit */,
+ );
+ productName = "Example iOS";
+ productReference = BC6841622E3CFBE0008FBFEE /* Example iOS.app */;
+ productType = "com.apple.product-type.application";
+ };
+ BC7A0E3C2B088FA7005FB2F7 /* Example visionOS */ = {
+ isa = PBXNativeTarget;
+ buildConfigurationList = BC7A0E542B088FA9005FB2F7 /* Build configuration list for PBXNativeTarget "Example visionOS" */;
+ buildPhases = (
+ BC7A0E392B088FA7005FB2F7 /* Sources */,
+ BC7A0E3A2B088FA7005FB2F7 /* Frameworks */,
+ BC7A0E3B2B088FA7005FB2F7 /* Resources */,
+ BC7A0E592B0894B9005FB2F7 /* Embed Frameworks */,
+ );
+ buildRules = (
+ );
+ dependencies = (
+ );
+ name = "Example visionOS";
+ packageProductDependencies = (
+ BCD917062D3A94BC00D30743 /* HaishinKit */,
+ BCD917082D3A94BC00D30743 /* MoQTHaishinKit */,
+ BCD9170A2D3A94BC00D30743 /* SRTHaishinKit */,
+ BCEC2BBF2E104D9A00422F8F /* RTMPHaishinKit */,
+ );
+ productName = "Example visionOS";
+ productReference = BC7A0E3D2B088FA7005FB2F7 /* Example visionOS.app */;
+ productType = "com.apple.product-type.application";
+ };
+ BC82124B2EB8F8BE00419D06 /* Example tvOS */ = {
+ isa = PBXNativeTarget;
+ buildConfigurationList = BC8212542EB8F8BF00419D06 /* Build configuration list for PBXNativeTarget "Example tvOS" */;
+ buildPhases = (
+ BC8212482EB8F8BE00419D06 /* Sources */,
+ BC8212492EB8F8BE00419D06 /* Frameworks */,
+ BC82124A2EB8F8BE00419D06 /* Resources */,
+ );
+ buildRules = (
+ );
+ dependencies = (
+ );
+ fileSystemSynchronizedGroups = (
+ BC82124D2EB8F8BE00419D06 /* tvOS */,
+ );
+ name = "Example tvOS";
+ packageProductDependencies = (
+ BC82125C2EB8FB1500419D06 /* HaishinKit */,
+ BC82125E2EB8FB1C00419D06 /* RTCHaishinKit */,
+ BC8212602EB8FB2000419D06 /* RTMPHaishinKit */,
+ BC8212622EB8FB2400419D06 /* SRTHaishinKit */,
+ BC8212642EB8FB2D00419D06 /* MoQTHaishinKit */,
+ );
+ productName = "Example tvOS";
+ productReference = BC82124C2EB8F8BE00419D06 /* Example tvOS.app */;
+ productType = "com.apple.product-type.application";
+ };
+ BCFE627D2E770A9900941209 /* Example macOS */ = {
+ isa = PBXNativeTarget;
+ buildConfigurationList = BCFE62872E770A9C00941209 /* Build configuration list for PBXNativeTarget "Example macOS" */;
+ buildPhases = (
+ BCFE627A2E770A9900941209 /* Sources */,
+ BCFE627B2E770A9900941209 /* Frameworks */,
+ BCFE627C2E770A9900941209 /* Resources */,
+ );
+ buildRules = (
+ );
+ dependencies = (
+ );
+ fileSystemSynchronizedGroups = (
+ BCFE627F2E770A9900941209 /* macOS */,
+ );
+ name = "Example macOS";
+ packageProductDependencies = (
+ BCFE62902E7710D800941209 /* HaishinKit */,
+ BCFE62A72E77179F00941209 /* RTCHaishinKit */,
+ BCFE62A92E77179F00941209 /* RTMPHaishinKit */,
+ BCFE62AB2E77179F00941209 /* SRTHaishinKit */,
+ );
+ productName = "Example macOS";
+ productReference = BCFE627E2E770A9900941209 /* Example macOS.app */;
+ productType = "com.apple.product-type.application";
+ };
+/* End PBXNativeTarget section */
+
+/* Begin PBXProject section */
+ 2945CBB41B4BE66000104112 /* Project object */ = {
+ isa = PBXProject;
+ attributes = {
+ BuildIndependentTargetsInParallel = YES;
+ LastSwiftMigration = 0700;
+ LastSwiftUpdateCheck = 2600;
+ LastUpgradeCheck = 1540;
+ ORGANIZATIONNAME = "";
+ TargetAttributes = {
+ BC4B7DCB2E86D06700973BD7 = {
+ CreatedOnToolsVersion = 16.4;
+ };
+ BC6841612E3CFBE0008FBFEE = {
+ CreatedOnToolsVersion = 16.4;
+ };
+ BC7A0E3C2B088FA7005FB2F7 = {
+ CreatedOnToolsVersion = 15.1;
+ };
+ BC82124B2EB8F8BE00419D06 = {
+ CreatedOnToolsVersion = 26.0.1;
+ };
+ BCFE627D2E770A9900941209 = {
+ CreatedOnToolsVersion = 16.4;
+ };
+ };
+ };
+ buildConfigurationList = 2945CBB71B4BE66000104112 /* Build configuration list for PBXProject "Examples" */;
+ compatibilityVersion = "Xcode 13.0";
+ developmentRegion = en;
+ hasScannedForEncodings = 0;
+ knownRegions = (
+ en,
+ Base,
+ );
+ mainGroup = 2945CBB31B4BE66000104112;
+ packageReferences = (
+ );
+ productRefGroup = 2945CBBE1B4BE66000104112 /* Products */;
+ projectDirPath = "";
+ projectReferences = (
+ {
+ ProductGroup = BC9128FB2D12E9970077EFF3 /* Products */;
+ ProjectRef = BC9128F82D12E9970077EFF3 /* Examples.xcodeproj */;
+ },
+ {
+ ProductGroup = BC9128802D12E7540077EFF3 /* Products */;
+ ProjectRef = BC91287D2D12E7540077EFF3 /* Examples.xcodeproj */;
+ },
+ );
+ projectRoot = "";
+ targets = (
+ BC6841612E3CFBE0008FBFEE /* Example iOS */,
+ BCFE627D2E770A9900941209 /* Example macOS */,
+ BC7A0E3C2B088FA7005FB2F7 /* Example visionOS */,
+ BC4B7DCB2E86D06700973BD7 /* Screencast */,
+ BC82124B2EB8F8BE00419D06 /* Example tvOS */,
+ );
+ };
+/* End PBXProject section */
+
+/* Begin PBXResourcesBuildPhase section */
+ BC4B7DCA2E86D06700973BD7 /* Resources */ = {
+ isa = PBXResourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ BC6841602E3CFBE0008FBFEE /* Resources */ = {
+ isa = PBXResourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ BC7A0E3B2B088FA7005FB2F7 /* Resources */ = {
+ isa = PBXResourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ BC82124A2EB8F8BE00419D06 /* Resources */ = {
+ isa = PBXResourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ BCFE627C2E770A9900941209 /* Resources */ = {
+ isa = PBXResourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXResourcesBuildPhase section */
+
+/* Begin PBXSourcesBuildPhase section */
+ BC4B7DC82E86D06700973BD7 /* Sources */ = {
+ isa = PBXSourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ BC4B7DE92E86D14A00973BD7 /* Preference.swift in Sources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ BC68415E2E3CFBE0008FBFEE /* Sources */ = {
+ isa = PBXSourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ BC05F96E2E3D248A002BCC92 /* Preference.swift in Sources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ BC7A0E392B088FA7005FB2F7 /* Sources */ = {
+ isa = PBXSourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ BC91298C2D12EB5E0077EFF3 /* Preference.swift in Sources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ BC8212482EB8F8BE00419D06 /* Sources */ = {
+ isa = PBXSourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ BC8212662EB8FB5200419D06 /* Preference.swift in Sources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ BCFE627A2E770A9900941209 /* Sources */ = {
+ isa = PBXSourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ BCFE628F2E7710A100941209 /* Preference.swift in Sources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXSourcesBuildPhase section */
+
+/* Begin PBXTargetDependency section */
+ BC4B7DD32E86D06700973BD7 /* PBXTargetDependency */ = {
+ isa = PBXTargetDependency;
+ target = BC4B7DCB2E86D06700973BD7 /* Screencast */;
+ targetProxy = BC4B7DD22E86D06700973BD7 /* PBXContainerItemProxy */;
+ };
+/* End PBXTargetDependency section */
+
+/* Begin XCBuildConfiguration section */
+ 2945CBD11B4BE66000104112 /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_SEARCH_USER_PATHS = NO;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
+ CLANG_CXX_LIBRARY = "libc++";
+ CLANG_ENABLE_MODULES = YES;
+ CLANG_ENABLE_OBJC_ARC = YES;
+ CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
+ CLANG_WARN_BOOL_CONVERSION = YES;
+ CLANG_WARN_COMMA = YES;
+ CLANG_WARN_CONSTANT_CONVERSION = YES;
+ CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
+ CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
+ CLANG_WARN_EMPTY_BODY = YES;
+ CLANG_WARN_ENUM_CONVERSION = YES;
+ CLANG_WARN_INFINITE_RECURSION = YES;
+ CLANG_WARN_INT_CONVERSION = YES;
+ CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
+ CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
+ CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
+ CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+ CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
+ CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
+ CLANG_WARN_STRICT_PROTOTYPES = YES;
+ CLANG_WARN_SUSPICIOUS_MOVE = YES;
+ CLANG_WARN_UNREACHABLE_CODE = YES;
+ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
+ "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
+ COPY_PHASE_STRIP = NO;
+ CURRENT_PROJECT_VERSION = 1;
+ DEAD_CODE_STRIPPING = YES;
+ DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
+ ENABLE_STRICT_OBJC_MSGSEND = YES;
+ ENABLE_TESTABILITY = YES;
+ ENABLE_USER_SCRIPT_SANDBOXING = YES;
+ EXCLUDED_ARCHS = "";
+ GCC_C_LANGUAGE_STANDARD = gnu99;
+ GCC_DYNAMIC_NO_PIC = NO;
+ GCC_NO_COMMON_BLOCKS = YES;
+ GCC_OPTIMIZATION_LEVEL = 0;
+ GCC_PREPROCESSOR_DEFINITIONS = (
+ "DEBUG=1",
+ "$(inherited)",
+ );
+ GCC_SYMBOLS_PRIVATE_EXTERN = NO;
+ GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+ GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
+ GCC_WARN_UNDECLARED_SELECTOR = YES;
+ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
+ GCC_WARN_UNUSED_FUNCTION = YES;
+ GCC_WARN_UNUSED_VARIABLE = YES;
+ IPHONEOS_DEPLOYMENT_TARGET = 13.0;
+ MACOSX_DEPLOYMENT_TARGET = 10.15;
+ MTL_ENABLE_DEBUG_INFO = YES;
+ ONLY_ACTIVE_ARCH = YES;
+ SDKROOT = "";
+ SUPPORTED_PLATFORMS = macosx;
+ SWIFT_OPTIMIZATION_LEVEL = "-Onone";
+ TARGETED_DEVICE_FAMILY = "1,2";
+ TVOS_DEPLOYMENT_TARGET = 13.0;
+ VERSIONING_SYSTEM = "apple-generic";
+ VERSION_INFO_PREFIX = "";
+ XROS_DEPLOYMENT_TARGET = 1.0;
+ };
+ name = Debug;
+ };
+ 2945CBD21B4BE66000104112 /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_SEARCH_USER_PATHS = NO;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
+ CLANG_CXX_LIBRARY = "libc++";
+ CLANG_ENABLE_MODULES = YES;
+ CLANG_ENABLE_OBJC_ARC = YES;
+ CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
+ CLANG_WARN_BOOL_CONVERSION = YES;
+ CLANG_WARN_COMMA = YES;
+ CLANG_WARN_CONSTANT_CONVERSION = YES;
+ CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
+ CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
+ CLANG_WARN_EMPTY_BODY = YES;
+ CLANG_WARN_ENUM_CONVERSION = YES;
+ CLANG_WARN_INFINITE_RECURSION = YES;
+ CLANG_WARN_INT_CONVERSION = YES;
+ CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
+ CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
+ CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
+ CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+ CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
+ CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
+ CLANG_WARN_STRICT_PROTOTYPES = YES;
+ CLANG_WARN_SUSPICIOUS_MOVE = YES;
+ CLANG_WARN_UNREACHABLE_CODE = YES;
+ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
+ "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
+ COPY_PHASE_STRIP = NO;
+ CURRENT_PROJECT_VERSION = 1;
+ DEAD_CODE_STRIPPING = YES;
+ DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
+ ENABLE_NS_ASSERTIONS = NO;
+ ENABLE_STRICT_OBJC_MSGSEND = YES;
+ ENABLE_USER_SCRIPT_SANDBOXING = YES;
+ GCC_C_LANGUAGE_STANDARD = gnu99;
+ GCC_NO_COMMON_BLOCKS = YES;
+ GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+ GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
+ GCC_WARN_UNDECLARED_SELECTOR = YES;
+ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
+ GCC_WARN_UNUSED_FUNCTION = YES;
+ GCC_WARN_UNUSED_VARIABLE = YES;
+ IPHONEOS_DEPLOYMENT_TARGET = 13.0;
+ MACOSX_DEPLOYMENT_TARGET = 10.15;
+ MTL_ENABLE_DEBUG_INFO = NO;
+ SDKROOT = "";
+ SUPPORTED_PLATFORMS = macosx;
+ SWIFT_COMPILATION_MODE = wholemodule;
+ SWIFT_OPTIMIZATION_LEVEL = "-O";
+ TARGETED_DEVICE_FAMILY = "1,2";
+ TVOS_DEPLOYMENT_TARGET = 13.0;
+ VALIDATE_PRODUCT = YES;
+ VERSIONING_SYSTEM = "apple-generic";
+ VERSION_INFO_PREFIX = "";
+ XROS_DEPLOYMENT_TARGET = 1.0;
+ };
+ name = Release;
+ };
+ BC4B7DD72E86D06700973BD7 /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
+ CLANG_ANALYZER_NONNULL = YES;
+ CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
+ CLANG_ENABLE_OBJC_WEAK = YES;
+ CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
+ CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
+ CODE_SIGN_STYLE = Automatic;
+ CURRENT_PROJECT_VERSION = 1;
+ DEBUG_INFORMATION_FORMAT = dwarf;
+ DEVELOPMENT_TEAM = SUEQ2SZ2L5;
+ GCC_C_LANGUAGE_STANDARD = gnu17;
+ GENERATE_INFOPLIST_FILE = YES;
+ INFOPLIST_FILE = iOS/Screencast/Info.plist;
+ INFOPLIST_KEY_CFBundleDisplayName = HaishinKit;
+ INFOPLIST_KEY_NSHumanReadableCopyright = "";
+ IPHONEOS_DEPLOYMENT_TARGET = 15.6;
+ LD_RUNPATH_SEARCH_PATHS = (
+ "$(inherited)",
+ "@executable_path/Frameworks",
+ "@executable_path/../../Frameworks",
+ );
+ LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
+ MARKETING_VERSION = 1.0;
+ MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
+ MTL_FAST_MATH = YES;
+ PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.Example-iOS.Screencast";
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ SDKROOT = iphoneos;
+ SKIP_INSTALL = YES;
+ SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
+ SUPPORTS_MACCATALYST = NO;
+ SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)";
+ SWIFT_EMIT_LOC_STRINGS = YES;
+ SWIFT_VERSION = 5.0;
+ TARGETED_DEVICE_FAMILY = "1,2";
+ };
+ name = Debug;
+ };
+ BC4B7DD82E86D06700973BD7 /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
+ CLANG_ANALYZER_NONNULL = YES;
+ CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
+ CLANG_ENABLE_OBJC_WEAK = YES;
+ CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
+ CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
+ CODE_SIGN_STYLE = Automatic;
+ CURRENT_PROJECT_VERSION = 1;
+ DEVELOPMENT_TEAM = SUEQ2SZ2L5;
+ GCC_C_LANGUAGE_STANDARD = gnu17;
+ GENERATE_INFOPLIST_FILE = YES;
+ INFOPLIST_FILE = iOS/Screencast/Info.plist;
+ INFOPLIST_KEY_CFBundleDisplayName = HaishinKit;
+ INFOPLIST_KEY_NSHumanReadableCopyright = "";
+ IPHONEOS_DEPLOYMENT_TARGET = 15.6;
+ LD_RUNPATH_SEARCH_PATHS = (
+ "$(inherited)",
+ "@executable_path/Frameworks",
+ "@executable_path/../../Frameworks",
+ );
+ LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
+ MARKETING_VERSION = 1.0;
+ MTL_FAST_MATH = YES;
+ PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.Example-iOS.Screencast";
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ SDKROOT = iphoneos;
+ SKIP_INSTALL = YES;
+ SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
+ SUPPORTS_MACCATALYST = NO;
+ SWIFT_EMIT_LOC_STRINGS = YES;
+ SWIFT_VERSION = 5.0;
+ TARGETED_DEVICE_FAMILY = "1,2";
+ };
+ name = Release;
+ };
+ BC68416C2E3CFBE2008FBFEE /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
+ ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
+ ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
+ CLANG_ANALYZER_NONNULL = YES;
+ CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
+ CLANG_ENABLE_OBJC_WEAK = YES;
+ CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
+ CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
+ CODE_SIGN_ENTITLEMENTS = iOS/HaishinApp.entitlements;
+ CODE_SIGN_STYLE = Automatic;
+ CURRENT_PROJECT_VERSION = 1;
+ DEBUG_INFORMATION_FORMAT = dwarf;
+ DEVELOPMENT_TEAM = SUEQ2SZ2L5;
+ ENABLE_HARDENED_RUNTIME = YES;
+ ENABLE_PREVIEWS = YES;
+ GCC_C_LANGUAGE_STANDARD = gnu17;
+ GENERATE_INFOPLIST_FILE = YES;
+ INFOPLIST_FILE = iOS/Info.plist;
+ INFOPLIST_KEY_CFBundleDisplayName = HaishinKit;
+ INFOPLIST_KEY_NSCameraUsageDescription = "Camera access is requested for live streaming.";
+ INFOPLIST_KEY_NSMicrophoneUsageDescription = "Microphone access is requested for live streaming.";
+ INFOPLIST_KEY_NSPhotoLibraryUsageDescription = "Save the captured video and audio content.";
+ "INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphoneos*]" = YES;
+ "INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphonesimulator*]" = YES;
+ "INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphoneos*]" = YES;
+ "INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphonesimulator*]" = YES;
+ "INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphoneos*]" = YES;
+ "INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphonesimulator*]" = YES;
+ "INFOPLIST_KEY_UIStatusBarStyle[sdk=iphoneos*]" = UIStatusBarStyleDefault;
+ "INFOPLIST_KEY_UIStatusBarStyle[sdk=iphonesimulator*]" = UIStatusBarStyleDefault;
+ INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
+ INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
+ IPHONEOS_DEPLOYMENT_TARGET = 16.0;
+ LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks";
+ "LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks";
+ LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
+ MACOSX_DEPLOYMENT_TARGET = 13.5;
+ MARKETING_VERSION = 1.0;
+ MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
+ MTL_FAST_MATH = YES;
+ PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.Example-iOS";
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ REGISTER_APP_GROUPS = YES;
+ SDKROOT = auto;
+ SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
+ SUPPORTS_MACCATALYST = NO;
+ SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)";
+ SWIFT_EMIT_LOC_STRINGS = YES;
+ SWIFT_VERSION = 6.0;
+ TARGETED_DEVICE_FAMILY = "1,2";
+ XROS_DEPLOYMENT_TARGET = 1.0;
+ };
+ name = Debug;
+ };
+ BC68416D2E3CFBE2008FBFEE /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
+ ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
+ ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
+ CLANG_ANALYZER_NONNULL = YES;
+ CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
+ CLANG_ENABLE_OBJC_WEAK = YES;
+ CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
+ CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
+ CODE_SIGN_ENTITLEMENTS = iOS/HaishinApp.entitlements;
+ CODE_SIGN_STYLE = Automatic;
+ CURRENT_PROJECT_VERSION = 1;
+ DEVELOPMENT_TEAM = SUEQ2SZ2L5;
+ ENABLE_HARDENED_RUNTIME = YES;
+ ENABLE_PREVIEWS = YES;
+ GCC_C_LANGUAGE_STANDARD = gnu17;
+ GENERATE_INFOPLIST_FILE = YES;
+ INFOPLIST_FILE = iOS/Info.plist;
+ INFOPLIST_KEY_CFBundleDisplayName = HaishinKit;
+ INFOPLIST_KEY_NSCameraUsageDescription = "Camera access is requested for live streaming.";
+ INFOPLIST_KEY_NSMicrophoneUsageDescription = "Microphone access is requested for live streaming.";
+ INFOPLIST_KEY_NSPhotoLibraryUsageDescription = "Save the captured video and audio content.";
+ "INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphoneos*]" = YES;
+ "INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphonesimulator*]" = YES;
+ "INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphoneos*]" = YES;
+ "INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphonesimulator*]" = YES;
+ "INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphoneos*]" = YES;
+ "INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphonesimulator*]" = YES;
+ "INFOPLIST_KEY_UIStatusBarStyle[sdk=iphoneos*]" = UIStatusBarStyleDefault;
+ "INFOPLIST_KEY_UIStatusBarStyle[sdk=iphonesimulator*]" = UIStatusBarStyleDefault;
+ INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
+ INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
+ IPHONEOS_DEPLOYMENT_TARGET = 16.0;
+ LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks";
+ "LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks";
+ LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
+ MACOSX_DEPLOYMENT_TARGET = 13.5;
+ MARKETING_VERSION = 1.0;
+ MTL_FAST_MATH = YES;
+ PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.Example-iOS";
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ REGISTER_APP_GROUPS = YES;
+ SDKROOT = auto;
+ SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
+ SUPPORTS_MACCATALYST = NO;
+ SWIFT_EMIT_LOC_STRINGS = YES;
+ SWIFT_VERSION = 6.0;
+ TARGETED_DEVICE_FAMILY = "1,2";
+ XROS_DEPLOYMENT_TARGET = 1.0;
+ };
+ name = Release;
+ };
+ BC7A0E522B088FA9005FB2F7 /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
+ ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
+ ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
+ CLANG_ANALYZER_NONNULL = YES;
+ CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
+ CLANG_ENABLE_OBJC_WEAK = YES;
+ CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
+ CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
+ CODE_SIGN_STYLE = Automatic;
+ CURRENT_PROJECT_VERSION = 1;
+ DEBUG_INFORMATION_FORMAT = dwarf;
+ DEVELOPMENT_TEAM = SUEQ2SZ2L5;
+ ENABLE_PREVIEWS = YES;
+ GCC_C_LANGUAGE_STANDARD = gnu17;
+ GENERATE_INFOPLIST_FILE = YES;
+ INFOPLIST_FILE = visionOS/Info.plist;
+ IPHONEOS_DEPLOYMENT_TARGET = 13.0;
+ LD_RUNPATH_SEARCH_PATHS = (
+ "$(inherited)",
+ "@executable_path/Frameworks",
+ );
+ LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
+ MACOSX_DEPLOYMENT_TARGET = 10.15;
+ MARKETING_VERSION = 1.0;
+ MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
+ MTL_FAST_MATH = YES;
+ PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.Example-visionOS";
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ SDKROOT = xros;
+ SUPPORTED_PLATFORMS = "xros xrsimulator";
+ SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)";
+ SWIFT_EMIT_LOC_STRINGS = YES;
+ SWIFT_STRICT_CONCURRENCY = complete;
+ SWIFT_VERSION = 5.0;
+ TARGETED_DEVICE_FAMILY = "1,2,7";
+ TVOS_DEPLOYMENT_TARGET = 13.0;
+ XROS_DEPLOYMENT_TARGET = 1.0;
+ };
+ name = Debug;
+ };
+ BC7A0E532B088FA9005FB2F7 /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
+ ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
+ ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
+ CLANG_ANALYZER_NONNULL = YES;
+ CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
+ CLANG_ENABLE_OBJC_WEAK = YES;
+ CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
+ CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
+ CODE_SIGN_STYLE = Automatic;
+ CURRENT_PROJECT_VERSION = 1;
+ DEVELOPMENT_TEAM = SUEQ2SZ2L5;
+ ENABLE_PREVIEWS = YES;
+ GCC_C_LANGUAGE_STANDARD = gnu17;
+ GENERATE_INFOPLIST_FILE = YES;
+ INFOPLIST_FILE = Examples/visionOS/Info.plist;
+ IPHONEOS_DEPLOYMENT_TARGET = 13.0;
+ LD_RUNPATH_SEARCH_PATHS = (
+ "$(inherited)",
+ "@executable_path/Frameworks",
+ );
+ LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
+ MACOSX_DEPLOYMENT_TARGET = 10.15;
+ MARKETING_VERSION = 1.0;
+ MTL_FAST_MATH = YES;
+ PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.Example-visionOS";
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ SDKROOT = xros;
+ SUPPORTED_PLATFORMS = "xros xrsimulator";
+ SWIFT_EMIT_LOC_STRINGS = YES;
+ SWIFT_STRICT_CONCURRENCY = complete;
+ SWIFT_VERSION = 5.0;
+ TARGETED_DEVICE_FAMILY = "1,2,7";
+ TVOS_DEPLOYMENT_TARGET = 13.0;
+ XROS_DEPLOYMENT_TARGET = 1.0;
+ };
+ name = Release;
+ };
+ BC8212552EB8F8BF00419D06 /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ASSETCATALOG_COMPILER_APPICON_NAME = "App Icon & Top Shelf Image";
+ ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
+ ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
+ CLANG_ANALYZER_NONNULL = YES;
+ CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
+ CLANG_ENABLE_OBJC_WEAK = YES;
+ CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
+ CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
+ CODE_SIGN_STYLE = Automatic;
+ CURRENT_PROJECT_VERSION = 1;
+ DEBUG_INFORMATION_FORMAT = dwarf;
+ DEVELOPMENT_TEAM = SUEQ2SZ2L5;
+ ENABLE_PREVIEWS = YES;
+ GCC_C_LANGUAGE_STANDARD = gnu17;
+ GENERATE_INFOPLIST_FILE = YES;
+ INFOPLIST_KEY_UILaunchScreen_Generation = YES;
+ INFOPLIST_KEY_UIUserInterfaceStyle = Automatic;
+ LD_RUNPATH_SEARCH_PATHS = (
+ "$(inherited)",
+ "@executable_path/Frameworks",
+ );
+ LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
+ MARKETING_VERSION = 1.0;
+ MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
+ MTL_FAST_MATH = YES;
+ PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.Example-tvOS";
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ SDKROOT = appletvos;
+ STRING_CATALOG_GENERATE_SYMBOLS = YES;
+ SUPPORTED_PLATFORMS = "appletvos appletvsimulator";
+ SUPPORTS_MACCATALYST = NO;
+ SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)";
+ SWIFT_APPROACHABLE_CONCURRENCY = YES;
+ SWIFT_DEFAULT_ACTOR_ISOLATION = MainActor;
+ SWIFT_EMIT_LOC_STRINGS = YES;
+ SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES;
+ SWIFT_VERSION = 5.0;
+ TARGETED_DEVICE_FAMILY = 3;
+ TVOS_DEPLOYMENT_TARGET = 15.0;
+ };
+ name = Debug;
+ };
+ BC8212562EB8F8BF00419D06 /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ASSETCATALOG_COMPILER_APPICON_NAME = "App Icon & Top Shelf Image";
+ ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
+ ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
+ CLANG_ANALYZER_NONNULL = YES;
+ CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
+ CLANG_ENABLE_OBJC_WEAK = YES;
+ CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
+ CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
+ CODE_SIGN_STYLE = Automatic;
+ CURRENT_PROJECT_VERSION = 1;
+ DEVELOPMENT_TEAM = SUEQ2SZ2L5;
+ ENABLE_PREVIEWS = YES;
+ GCC_C_LANGUAGE_STANDARD = gnu17;
+ GENERATE_INFOPLIST_FILE = YES;
+ INFOPLIST_KEY_UILaunchScreen_Generation = YES;
+ INFOPLIST_KEY_UIUserInterfaceStyle = Automatic;
+ LD_RUNPATH_SEARCH_PATHS = (
+ "$(inherited)",
+ "@executable_path/Frameworks",
+ );
+ LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
+ MARKETING_VERSION = 1.0;
+ MTL_FAST_MATH = YES;
+ PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.Example-tvOS";
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ SDKROOT = appletvos;
+ STRING_CATALOG_GENERATE_SYMBOLS = YES;
+ SUPPORTED_PLATFORMS = "appletvos appletvsimulator";
+ SUPPORTS_MACCATALYST = NO;
+ SWIFT_APPROACHABLE_CONCURRENCY = YES;
+ SWIFT_DEFAULT_ACTOR_ISOLATION = MainActor;
+ SWIFT_EMIT_LOC_STRINGS = YES;
+ SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES;
+ SWIFT_VERSION = 5.0;
+ TARGETED_DEVICE_FAMILY = 3;
+ TVOS_DEPLOYMENT_TARGET = 15.0;
+ };
+ name = Release;
+ };
+ BCFE62882E770A9C00941209 /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
+ ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
+ ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
+ CLANG_ANALYZER_NONNULL = YES;
+ CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
+ CLANG_ENABLE_OBJC_WEAK = YES;
+ CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
+ CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
+ CODE_SIGN_ENTITLEMENTS = macOS/HaishinApp.entitlements;
+ CODE_SIGN_STYLE = Automatic;
+ COMBINE_HIDPI_IMAGES = YES;
+ CURRENT_PROJECT_VERSION = 1;
+ DEBUG_INFORMATION_FORMAT = dwarf;
+ DEVELOPMENT_ASSET_PATHS = macOS/SampleVideo_360x240_5mb.mp4;
+ DEVELOPMENT_TEAM = SUEQ2SZ2L5;
+ ENABLE_HARDENED_RUNTIME = YES;
+ ENABLE_PREVIEWS = YES;
+ GCC_C_LANGUAGE_STANDARD = gnu17;
+ GENERATE_INFOPLIST_FILE = YES;
+ INFOPLIST_KEY_NSCameraUsageDescription = "";
+ INFOPLIST_KEY_NSHumanReadableCopyright = "";
+ INFOPLIST_KEY_NSMicrophoneUsageDescription = "";
+ LD_RUNPATH_SEARCH_PATHS = (
+ "$(inherited)",
+ "@executable_path/../Frameworks",
+ );
+ LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
+ MACOSX_DEPLOYMENT_TARGET = 15.5;
+ MARKETING_VERSION = 1.0;
+ MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
+ MTL_FAST_MATH = YES;
+ PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.Example-macOS";
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ REGISTER_APP_GROUPS = YES;
+ SDKROOT = macosx;
+ SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)";
+ SWIFT_EMIT_LOC_STRINGS = YES;
+ SWIFT_VERSION = 5.0;
+ };
+ name = Debug;
+ };
+ BCFE62892E770A9C00941209 /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
+ ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
+ ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
+ CLANG_ANALYZER_NONNULL = YES;
+ CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
+ CLANG_ENABLE_OBJC_WEAK = YES;
+ CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
+ CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
+ CODE_SIGN_ENTITLEMENTS = macOS/HaishinApp.entitlements;
+ CODE_SIGN_STYLE = Automatic;
+ COMBINE_HIDPI_IMAGES = YES;
+ CURRENT_PROJECT_VERSION = 1;
+ DEVELOPMENT_ASSET_PATHS = macOS/SampleVideo_360x240_5mb.mp4;
+ DEVELOPMENT_TEAM = SUEQ2SZ2L5;
+ ENABLE_HARDENED_RUNTIME = YES;
+ ENABLE_PREVIEWS = YES;
+ GCC_C_LANGUAGE_STANDARD = gnu17;
+ GENERATE_INFOPLIST_FILE = YES;
+ INFOPLIST_KEY_NSCameraUsageDescription = "";
+ INFOPLIST_KEY_NSHumanReadableCopyright = "";
+ INFOPLIST_KEY_NSMicrophoneUsageDescription = "";
+ LD_RUNPATH_SEARCH_PATHS = (
+ "$(inherited)",
+ "@executable_path/../Frameworks",
+ );
+ LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
+ MACOSX_DEPLOYMENT_TARGET = 15.5;
+ MARKETING_VERSION = 1.0;
+ MTL_FAST_MATH = YES;
+ PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.Example-macOS";
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ REGISTER_APP_GROUPS = YES;
+ SDKROOT = macosx;
+ SWIFT_EMIT_LOC_STRINGS = YES;
+ SWIFT_VERSION = 5.0;
+ };
+ name = Release;
+ };
+/* End XCBuildConfiguration section */
+
+/* Begin XCConfigurationList section */
+ 2945CBB71B4BE66000104112 /* Build configuration list for PBXProject "Examples" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 2945CBD11B4BE66000104112 /* Debug */,
+ 2945CBD21B4BE66000104112 /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+ BC4B7DD62E86D06700973BD7 /* Build configuration list for PBXNativeTarget "Screencast" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ BC4B7DD72E86D06700973BD7 /* Debug */,
+ BC4B7DD82E86D06700973BD7 /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+ BC68416B2E3CFBE2008FBFEE /* Build configuration list for PBXNativeTarget "Example iOS" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ BC68416C2E3CFBE2008FBFEE /* Debug */,
+ BC68416D2E3CFBE2008FBFEE /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+ BC7A0E542B088FA9005FB2F7 /* Build configuration list for PBXNativeTarget "Example visionOS" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ BC7A0E522B088FA9005FB2F7 /* Debug */,
+ BC7A0E532B088FA9005FB2F7 /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+ BC8212542EB8F8BF00419D06 /* Build configuration list for PBXNativeTarget "Example tvOS" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ BC8212552EB8F8BF00419D06 /* Debug */,
+ BC8212562EB8F8BF00419D06 /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+ BCFE62872E770A9C00941209 /* Build configuration list for PBXNativeTarget "Example macOS" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ BCFE62882E770A9C00941209 /* Debug */,
+ BCFE62892E770A9C00941209 /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+/* End XCConfigurationList section */
+
+/* Begin XCSwiftPackageProductDependency section */
+ BC4B7DDF2E86D13C00973BD7 /* HaishinKit */ = {
+ isa = XCSwiftPackageProductDependency;
+ productName = HaishinKit;
+ };
+ BC4B7DE12E86D13C00973BD7 /* MoQTHaishinKit */ = {
+ isa = XCSwiftPackageProductDependency;
+ productName = MoQTHaishinKit;
+ };
+ BC4B7DE32E86D13C00973BD7 /* RTCHaishinKit */ = {
+ isa = XCSwiftPackageProductDependency;
+ productName = RTCHaishinKit;
+ };
+ BC4B7DE52E86D13C00973BD7 /* RTMPHaishinKit */ = {
+ isa = XCSwiftPackageProductDependency;
+ productName = RTMPHaishinKit;
+ };
+ BC4B7DE72E86D13C00973BD7 /* SRTHaishinKit */ = {
+ isa = XCSwiftPackageProductDependency;
+ productName = SRTHaishinKit;
+ };
+ BC6842B52E3D1294008FBFEE /* HaishinKit */ = {
+ isa = XCSwiftPackageProductDependency;
+ productName = HaishinKit;
+ };
+ BC6842B72E3D1294008FBFEE /* MoQTHaishinKit */ = {
+ isa = XCSwiftPackageProductDependency;
+ productName = MoQTHaishinKit;
+ };
+ BC6842B92E3D1294008FBFEE /* RTMPHaishinKit */ = {
+ isa = XCSwiftPackageProductDependency;
+ productName = RTMPHaishinKit;
+ };
+ BC6842BB2E3D1294008FBFEE /* SRTHaishinKit */ = {
+ isa = XCSwiftPackageProductDependency;
+ productName = SRTHaishinKit;
+ };
+ BC82125C2EB8FB1500419D06 /* HaishinKit */ = {
+ isa = XCSwiftPackageProductDependency;
+ productName = HaishinKit;
+ };
+ BC82125E2EB8FB1C00419D06 /* RTCHaishinKit */ = {
+ isa = XCSwiftPackageProductDependency;
+ productName = RTCHaishinKit;
+ };
+ BC8212602EB8FB2000419D06 /* RTMPHaishinKit */ = {
+ isa = XCSwiftPackageProductDependency;
+ productName = RTMPHaishinKit;
+ };
+ BC8212622EB8FB2400419D06 /* SRTHaishinKit */ = {
+ isa = XCSwiftPackageProductDependency;
+ productName = SRTHaishinKit;
+ };
+ BC8212642EB8FB2D00419D06 /* MoQTHaishinKit */ = {
+ isa = XCSwiftPackageProductDependency;
+ productName = MoQTHaishinKit;
+ };
+ BCD2478B2E54C04E00C64280 /* RTCHaishinKit */ = {
+ isa = XCSwiftPackageProductDependency;
+ productName = RTCHaishinKit;
+ };
+ BCD917062D3A94BC00D30743 /* HaishinKit */ = {
+ isa = XCSwiftPackageProductDependency;
+ productName = HaishinKit;
+ };
+ BCD917082D3A94BC00D30743 /* MoQTHaishinKit */ = {
+ isa = XCSwiftPackageProductDependency;
+ productName = MoQTHaishinKit;
+ };
+ BCD9170A2D3A94BC00D30743 /* SRTHaishinKit */ = {
+ isa = XCSwiftPackageProductDependency;
+ productName = SRTHaishinKit;
+ };
+ BCEC2BBF2E104D9A00422F8F /* RTMPHaishinKit */ = {
+ isa = XCSwiftPackageProductDependency;
+ productName = RTMPHaishinKit;
+ };
+ BCFE62902E7710D800941209 /* HaishinKit */ = {
+ isa = XCSwiftPackageProductDependency;
+ productName = HaishinKit;
+ };
+ BCFE62A72E77179F00941209 /* RTCHaishinKit */ = {
+ isa = XCSwiftPackageProductDependency;
+ productName = RTCHaishinKit;
+ };
+ BCFE62A92E77179F00941209 /* RTMPHaishinKit */ = {
+ isa = XCSwiftPackageProductDependency;
+ productName = RTMPHaishinKit;
+ };
+ BCFE62AB2E77179F00941209 /* SRTHaishinKit */ = {
+ isa = XCSwiftPackageProductDependency;
+ productName = SRTHaishinKit;
+ };
+/* End XCSwiftPackageProductDependency section */
+ };
+ rootObject = 2945CBB41B4BE66000104112 /* Project object */;
+}
diff --git a/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/project.xcworkspace/contents.xcworkspacedata b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/project.xcworkspace/contents.xcworkspacedata
new file mode 100644
index 000000000..919434a62
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/project.xcworkspace/contents.xcworkspacedata
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Workspace
+ version = "1.0">
+ <FileRef
+  location = "self:">
+ </FileRef>
+</Workspace>
diff --git a/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist
new file mode 100644
index 000000000..18d981003
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>IDEDidComputeMac32BitWarning</key>
+ <true/>
+</dict>
+</plist>
diff --git a/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved
new file mode 100644
index 000000000..6db9ce555
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved
@@ -0,0 +1,33 @@
+{
+ "originHash" : "0623d92bee87e5013eb796ad9f159d469c5255adc372c70de458149479be7518",
+ "pins" : [
+ {
+ "identity" : "logboard",
+ "kind" : "remoteSourceControl",
+ "location" : "https://github.com/shogo4405/Logboard.git",
+ "state" : {
+ "revision" : "8f41c63afb903040b77049ee2efa8c257b8c0d50",
+ "version" : "2.6.0"
+ }
+ },
+ {
+ "identity" : "swift-docc-plugin",
+ "kind" : "remoteSourceControl",
+ "location" : "https://github.com/swiftlang/swift-docc-plugin",
+ "state" : {
+ "revision" : "3e4f133a77e644a5812911a0513aeb7288b07d06",
+ "version" : "1.4.5"
+ }
+ },
+ {
+ "identity" : "swift-docc-symbolkit",
+ "kind" : "remoteSourceControl",
+ "location" : "https://github.com/swiftlang/swift-docc-symbolkit",
+ "state" : {
+ "revision" : "b45d1f2ed151d057b54504d653e0da5552844e34",
+ "version" : "1.0.0"
+ }
+ }
+ ],
+ "version" : 3
+}
diff --git a/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Example iOS.xcscheme b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Example iOS.xcscheme
new file mode 100644
index 000000000..8e4e9c162
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Example iOS.xcscheme
@@ -0,0 +1,80 @@
diff --git a/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Example macOS.xcscheme b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Example macOS.xcscheme
new file mode 100644
index 000000000..6d4c5ded4
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Example macOS.xcscheme
@@ -0,0 +1,78 @@
diff --git a/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Example tvOS.xcscheme b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Example tvOS.xcscheme
new file mode 100644
index 000000000..7ae70b686
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Example tvOS.xcscheme
@@ -0,0 +1,78 @@
diff --git a/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Example visionOS.xcscheme b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Example visionOS.xcscheme
new file mode 100644
index 000000000..ae150d948
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Example visionOS.xcscheme
@@ -0,0 +1,78 @@
diff --git a/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Screencast.xcscheme b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Screencast.xcscheme
new file mode 100644
index 000000000..af14ce15c
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Screencast.xcscheme
@@ -0,0 +1,97 @@
diff --git a/Vendor/HaishinKit.swift/Examples/Package.swift b/Vendor/HaishinKit.swift/Examples/Package.swift
new file mode 100644
index 000000000..47e7f05fc
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/Package.swift
@@ -0,0 +1,11 @@
+// swift-tools-version:5.10
+
+// Leave blank. This is only here so that Xcode doesn't display it.
+
+import PackageDescription
+
+let package = Package(
+ name: "Examples",
+ products: [],
+ targets: []
+)
diff --git a/Vendor/HaishinKit.swift/Examples/Preference.swift b/Vendor/HaishinKit.swift/Examples/Preference.swift
new file mode 100644
index 000000000..a7abd005a
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/Preference.swift
@@ -0,0 +1,17 @@
+import Foundation
+
+struct Preference: Sendable {
+ // Temp
+ static nonisolated(unsafe) var `default` = Preference()
+
+ // var uri = "http://192.168.1.14:1985/rtc/v1/whip/?app=live&stream=livestream"
+ var uri = "rtmp://192.168.1.7/live"
+ var streamName = "live"
+
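+ // For example, with the defaults above makeURL() yields
+ // rtmp://192.168.1.7/live/live (the stream name is appended for RTMP),
+ // while an http(s) WHIP endpoint would be returned unchanged.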
+ func makeURL() -> URL? {
+ if uri.contains("rtmp://") {
+ return URL(string: uri + "/" + streamName)
+ }
+ return URL(string: uri)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/iOS/Assets.xcassets/AccentColor.colorset/Contents.json b/Vendor/HaishinKit.swift/Examples/iOS/Assets.xcassets/AccentColor.colorset/Contents.json
new file mode 100644
index 000000000..eb8789700
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/iOS/Assets.xcassets/AccentColor.colorset/Contents.json
@@ -0,0 +1,11 @@
+{
+ "colors" : [
+ {
+ "idiom" : "universal"
+ }
+ ],
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/iOS/Assets.xcassets/AppIcon.appiconset/AppIcon.png b/Vendor/HaishinKit.swift/Examples/iOS/Assets.xcassets/AppIcon.appiconset/AppIcon.png
new file mode 100644
index 000000000..62e3de039
Binary files /dev/null and b/Vendor/HaishinKit.swift/Examples/iOS/Assets.xcassets/AppIcon.appiconset/AppIcon.png differ
diff --git a/Vendor/HaishinKit.swift/Examples/iOS/Assets.xcassets/AppIcon.appiconset/Contents.json b/Vendor/HaishinKit.swift/Examples/iOS/Assets.xcassets/AppIcon.appiconset/Contents.json
new file mode 100644
index 000000000..5081081c5
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/iOS/Assets.xcassets/AppIcon.appiconset/Contents.json
@@ -0,0 +1,86 @@
+{
+ "images" : [
+ {
+ "filename" : "AppIcon.png",
+ "idiom" : "universal",
+ "platform" : "ios",
+ "size" : "1024x1024"
+ },
+ {
+ "appearances" : [
+ {
+ "appearance" : "luminosity",
+ "value" : "dark"
+ }
+ ],
+ "idiom" : "universal",
+ "platform" : "ios",
+ "size" : "1024x1024"
+ },
+ {
+ "appearances" : [
+ {
+ "appearance" : "luminosity",
+ "value" : "tinted"
+ }
+ ],
+ "idiom" : "universal",
+ "platform" : "ios",
+ "size" : "1024x1024"
+ },
+ {
+ "idiom" : "mac",
+ "scale" : "1x",
+ "size" : "16x16"
+ },
+ {
+ "idiom" : "mac",
+ "scale" : "2x",
+ "size" : "16x16"
+ },
+ {
+ "idiom" : "mac",
+ "scale" : "1x",
+ "size" : "32x32"
+ },
+ {
+ "idiom" : "mac",
+ "scale" : "2x",
+ "size" : "32x32"
+ },
+ {
+ "idiom" : "mac",
+ "scale" : "1x",
+ "size" : "128x128"
+ },
+ {
+ "idiom" : "mac",
+ "scale" : "2x",
+ "size" : "128x128"
+ },
+ {
+ "idiom" : "mac",
+ "scale" : "1x",
+ "size" : "256x256"
+ },
+ {
+ "idiom" : "mac",
+ "scale" : "2x",
+ "size" : "256x256"
+ },
+ {
+ "idiom" : "mac",
+ "scale" : "1x",
+ "size" : "512x512"
+ },
+ {
+ "idiom" : "mac",
+ "scale" : "2x",
+ "size" : "512x512"
+ }
+ ],
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/iOS/Assets.xcassets/Contents.json b/Vendor/HaishinKit.swift/Examples/iOS/Assets.xcassets/Contents.json
new file mode 100644
index 000000000..73c00596a
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/iOS/Assets.xcassets/Contents.json
@@ -0,0 +1,6 @@
+{
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/iOS/AudioEngineCapture.swift b/Vendor/HaishinKit.swift/Examples/iOS/AudioEngineCapture.swift
new file mode 100644
index 000000000..b8a675431
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/iOS/AudioEngineCapture.swift
@@ -0,0 +1,62 @@
+import AVFoundation
+import Foundation
+import HaishinKit
+
+protocol AudioEngineCaptureDelegate: AnyObject {
+ func audioCapture(_ audioCapture: AudioEngineCapture, buffer: AVAudioPCMBuffer, time: AVAudioTime)
+}
+
+final class AudioEngineCapture {
+ var delegate: (any AudioEngineCaptureDelegate)?
+
+ private(set) var isRunning = false
+ private var audioEngine = AVAudioEngine()
+
+ func startCaptureIfNeeded() {
+ guard isRunning else {
+ return
+ }
+ audioEngine.stop()
+ audioEngine.inputNode.removeTap(onBus: 0)
+ audioEngine = AVAudioEngine()
+ do {
+ try startCapture()
+ } catch {
+ logger.warn(error)
+ }
+ }
+
+ private func startCapture() throws {
+ let input = audioEngine.inputNode
+ let mixer = audioEngine.mainMixerNode
+ audioEngine.connect(input, to: mixer, format: input.inputFormat(forBus: 0))
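+ // Tap bus 0 of the input node; each captured PCM buffer (~1024 frames)
+ // is forwarded to the delegate together with its capture timestamp.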
+ input.installTap(onBus: 0, bufferSize: 1024, format: input.inputFormat(forBus: 0)) { buffer, when in
+ self.delegate?.audioCapture(self, buffer: buffer, time: when)
+ }
+ audioEngine.prepare()
+ try audioEngine.start()
+ }
+}
+
+extension AudioEngineCapture: Runner {
+ // MARK: Runner
+ func startRunning() {
+ guard !isRunning else {
+ return
+ }
+ do {
+ try startCapture()
+ isRunning = true
+ } catch {
+ logger.error(error)
+ }
+ }
+
+ func stopRunning() {
+ guard isRunning else {
+ return
+ }
+ audioEngine.stop()
+ isRunning = false
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/iOS/AudioSourceService.swift b/Vendor/HaishinKit.swift/Examples/iOS/AudioSourceService.swift
new file mode 100644
index 000000000..dffd10c77
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/iOS/AudioSourceService.swift
@@ -0,0 +1,218 @@
+@preconcurrency import AVFoundation
+import Combine
+import HaishinKit
+
+struct AudioSource: Sendable, Hashable, Equatable, CustomStringConvertible {
+ static let empty = AudioSource(portName: "", dataSourceName: "", isSupportedStereo: false)
+
+ let portName: String
+ let dataSourceName: String
+ let isSupportedStereo: Bool
+
+ var description: String {
+ if isSupportedStereo {
+ return "\(portName)(\(dataSourceName))(Stereo)"
+ }
+ return "\(portName)(\(dataSourceName))(Mono)"
+ }
+}
+
+actor AudioSourceService {
+ enum Error: Swift.Error {
+ case missingDataSource(_ source: AudioSource)
+ }
+
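+ // Note: each access to `buffer` installs a fresh continuation and finishes
+ // the previous stream (see bufferContinuation.didSet), so only the most
+ // recent consumer receives captured buffers.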
+ var buffer: AsyncStream<(AVAudioPCMBuffer, AVAudioTime)> {
+ AsyncStream { continuation in
+ bufferContinuation = continuation
+ }
+ }
+
+ private(set) var mode: AudioSourceServiceMode = .audioEngine
+ private(set) var isRunning = false
+ private(set) var sources: [AudioSource] = [] {
+ didSet {
+ guard sources != oldValue else {
+ return
+ }
+ continuation?.yield(sources)
+ }
+ }
+ private let session = AVAudioSession.sharedInstance()
+ private var continuation: AsyncStream<[AudioSource]>.Continuation? {
+ didSet {
+ oldValue?.finish()
+ }
+ }
+ private var tasks: [Task<Void, Never>] = []
+ private var audioEngineCapture: AudioEngineCapture? {
+ didSet {
+ audioEngineCapture?.delegate = self
+ }
+ }
+ private var bufferContinuation: AsyncStream<(AVAudioPCMBuffer, AVAudioTime)>.Continuation? {
+ didSet {
+ oldValue?.finish()
+ }
+ }
+
+ func setUp(_ mode: AudioSourceServiceMode) {
+ self.mode = mode
+ do {
+ let session = AVAudioSession.sharedInstance()
+ // Requesting a specific "mode" makes stereo capture impossible, so the default mode is used here.
+ try session.setCategory(.playAndRecord, mode: .default, options: [.defaultToSpeaker, .allowBluetoothHFP])
+ try session.setActive(true)
+ // It looks like this setting is required on iOS 18.5.
+ try? session.setPreferredInputNumberOfChannels(2)
+ } catch {
+ logger.error(error)
+ }
+ }
+
+ func sourcesUpdates() -> AsyncStream<[AudioSource]> {
+ AsyncStream { continuation in
+ self.continuation = continuation
+ continuation.yield(sources)
+ }
+ }
+
+ func selectAudioSource(_ audioSource: AudioSource) throws {
+ setPreferredInputBuiltInMic(true)
+ guard let preferredInput = AVAudioSession.sharedInstance().preferredInput,
+ let dataSources = preferredInput.dataSources,
+ let newDataSource = dataSources.first(where: { $0.dataSourceName == audioSource.dataSourceName }),
+ let supportedPolarPatterns = newDataSource.supportedPolarPatterns else {
+ throw Error.missingDataSource(audioSource)
+ }
+ do {
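+ // Stereo capture requires opting into the .stereo polar pattern on the
+ // data source before selecting it as the preferred data source.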
+ let isStereoSupported = supportedPolarPatterns.contains(.stereo)
+ if isStereoSupported {
+ try newDataSource.setPreferredPolarPattern(.stereo)
+ }
+ try preferredInput.setPreferredDataSource(newDataSource)
+ } catch {
+ logger.warn(error)
+ }
+ }
+
+ private func makeAudioSources() -> [AudioSource] {
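+ // If the current route exposes no input data sources (e.g. an external
+ // microphone is attached), fall back to the system default input;
+ // otherwise prefer the built-in mic and enumerate its data sources.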
+ if session.inputDataSources?.isEmpty == true {
+ setPreferredInputBuiltInMic(false)
+ } else {
+ setPreferredInputBuiltInMic(true)
+ }
+ guard let preferredInput = session.preferredInput else {
+ return []
+ }
+ var sources: [AudioSource] = []
+ for dataSource in session.preferredInput?.dataSources ?? [] {
+ sources.append(.init(
+ portName: preferredInput.portName,
+ dataSourceName: dataSource.dataSourceName,
+ isSupportedStereo: dataSource.supportedPolarPatterns?.contains(.stereo) ?? false
+ ))
+ }
+ return sources
+ }
+
+ private func setPreferredInputBuiltInMic(_ isEnabled: Bool) {
+ do {
+ if isEnabled {
+ guard let availableInputs = session.availableInputs,
+ let builtInMicInput = availableInputs.first(where: { $0.portType == .builtInMic }) else {
+ return
+ }
+ try session.setPreferredInput(builtInMicInput)
+ } else {
+ try session.setPreferredInput(nil)
+ }
+ } catch {
+ logger.warn(error)
+ }
+ }
+}
+
+extension AudioSourceService: AsyncRunner {
+ // MARK: AsyncRunner
+ func startRunning() async {
+ guard !isRunning else {
+ return
+ }
+ switch mode {
+ case .audioSource:
+ break
+ case .audioSourceWithStereo:
+ sources = makeAudioSources()
+ tasks.append(Task {
+ for await reason in NotificationCenter.default.notifications(named: AVAudioSession.routeChangeNotification)
+ .compactMap({ $0.userInfo?[AVAudioSessionRouteChangeReasonKey] as? UInt })
+ .compactMap({ AVAudioSession.RouteChangeReason(rawValue: $0) }) {
+ logger.info("route change ->", reason.rawValue)
+ sources = makeAudioSources()
+ }
+ })
+ case .audioEngine:
+ audioEngineCapture = AudioEngineCapture()
+ audioEngineCapture?.startRunning()
+ tasks.append(Task {
+ for await reason in NotificationCenter.default.notifications(named: AVAudioSession.routeChangeNotification)
+ .compactMap({ $0.userInfo?[AVAudioSessionRouteChangeReasonKey] as? UInt })
+ .compactMap({ AVAudioSession.RouteChangeReason(rawValue: $0) }) {
+ // Restarting capture can crash when triggered by route changes other than attaching or detaching earphones, so only those reasons are handled here. https://github.com/HaishinKit/HaishinKit.swift/issues/1863
+ switch reason {
+ case .newDeviceAvailable, .oldDeviceUnavailable:
+ audioEngineCapture?.startCaptureIfNeeded()
+ default: ()
+ }
+ }
+ })
+ tasks.append(Task {
+ for await notification in NotificationCenter.default.notifications(
+ named: AVAudioSession.interruptionNotification,
+ object: AVAudioSession.sharedInstance()
+ ) {
+ guard
+ let userInfo = notification.userInfo,
+ let typeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
+ let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
+ continue // Skip a malformed notification instead of ending the observation task.
+ }
+ switch type {
+ case .began:
+ logger.info("interruption began", notification)
+ case .ended:
+ logger.info("interruption end", notification)
+ let optionsValue =
+ userInfo[AVAudioSessionInterruptionOptionKey] as? UInt
+ let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue ?? 0)
+ if options.contains(.shouldResume) {
+ audioEngineCapture?.startCaptureIfNeeded()
+ }
+ default: ()
+ }
+ }
+ })
+ }
+ isRunning = true
+ }
+
+ func stopRunning() async {
+ guard isRunning else {
+ return
+ }
+ audioEngineCapture?.stopRunning()
+ tasks.forEach { $0.cancel() }
+ tasks.removeAll()
+ isRunning = false
+ }
+}
+
+extension AudioSourceService: AudioEngineCaptureDelegate {
+ // MARK: AudioEngineCaptureDelegate
+ nonisolated func audioCapture(_ audioCapture: AudioEngineCapture, buffer: AVAudioPCMBuffer, time: AVAudioTime) {
+ Task {
+ await bufferContinuation?.yield((buffer, time))
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/iOS/ContentView.swift b/Vendor/HaishinKit.swift/Examples/iOS/ContentView.swift
new file mode 100644
index 000000000..e0e2608c6
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/iOS/ContentView.swift
@@ -0,0 +1,33 @@
+import SwiftUI
+
+struct ContentView: View {
+ var body: some View {
+ TabView {
+ PreferenceView()
+ .tabItem {
+ Image(systemName: "person.circle")
+ Text("Preference")
+ }
+
+ PublishView()
+ .tabItem {
+ Image(systemName: "record.circle")
+ Text("Publish")
+ }
+
+ if #available(iOS 17.0, *), UIDevice.current.userInterfaceIdiom == .pad {
+ UVCView()
+ .tabItem {
+ Image(systemName: "record.circle")
+ Text("UVC Camera")
+ }
+ }
+
+ PlaybackView()
+ .tabItem {
+ Image(systemName: "play.circle")
+ Text("Playback")
+ }
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/iOS/HaishinApp.entitlements b/Vendor/HaishinKit.swift/Examples/iOS/HaishinApp.entitlements
new file mode 100644
index 000000000..f2ef3ae02
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/iOS/HaishinApp.entitlements
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>com.apple.security.app-sandbox</key>
+ <true/>
+ <key>com.apple.security.files.user-selected.read-only</key>
+ <true/>
+</dict>
+</plist>
diff --git a/Vendor/HaishinKit.swift/Examples/iOS/HaishinApp.swift b/Vendor/HaishinKit.swift/Examples/iOS/HaishinApp.swift
new file mode 100644
index 000000000..5265b724a
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/iOS/HaishinApp.swift
@@ -0,0 +1,65 @@
+import HaishinKit
+@preconcurrency import Logboard
+import RTCHaishinKit
+import RTMPHaishinKit
+import SRTHaishinKit
+import SwiftUI
+
+nonisolated let logger = LBLogger.with("com.haishinkit.HaishinApp")
+
+@main
+struct HaishinApp: App {
+ @State private var preference = PreferenceViewModel()
+ @State private var isInitialized = false
+
+ var body: some Scene {
+ WindowGroup {
+ if isInitialized {
+ ContentView()
+ .environmentObject(preference)
+ } else {
+ LaunchScreen()
+ .task {
+ await initialize()
+ isInitialized = true
+ }
+ }
+ }
+ }
+
+ private func initialize() async {
+ await SessionBuilderFactory.shared.register(RTMPSessionFactory())
+ await SessionBuilderFactory.shared.register(SRTSessionFactory())
+ await SessionBuilderFactory.shared.register(HTTPSessionFactory())
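+ // Presumably the HTTPSessionFactory is what matches http(s) endpoints such
+ // as the commented-out WHIP URI in Preference.swift, while the RTMP and
+ // SRT factories cover rtmp:// and srt:// URLs.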
+
+ await RTCLogger.shared.setLevel(.debug)
+ await SRTLogger.shared.setLevel(.debug)
+ }
+
+ init() {
+ LBLogger(kHaishinKitIdentifier).level = .debug
+ LBLogger(kRTCHaishinKitIdentifier).level = .debug
+ LBLogger(kRTMPHaishinKitIdentifier).level = .debug
+ LBLogger(kSRTHaishinKitIdentifier).level = .debug
+ }
+}
+
+struct LaunchScreen: View {
+ var body: some View {
+ ZStack {
+ Color.black.ignoresSafeArea()
+ VStack(spacing: 20) {
+ Image(systemName: "video.fill")
+ .font(.system(size: 60))
+ .foregroundColor(.white)
+ Text("HaishinKit")
+ .font(.title)
+ .fontWeight(.bold)
+ .foregroundColor(.white)
+ ProgressView()
+ .tint(.white)
+ .padding(.top, 20)
+ }
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/iOS/Info.plist b/Vendor/HaishinKit.swift/Examples/iOS/Info.plist
new file mode 100644
index 000000000..515c9376f
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/iOS/Info.plist
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>UIBackgroundModes</key>
+ <array>
+  <string>audio</string>
+  <string>voip</string>
+ </array>
+</dict>
+</plist>
diff --git a/Vendor/HaishinKit.swift/Examples/iOS/InfoGuideView.swift b/Vendor/HaishinKit.swift/Examples/iOS/InfoGuideView.swift
new file mode 100644
index 000000000..cd7b5db77
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/iOS/InfoGuideView.swift
@@ -0,0 +1,151 @@
+import SwiftUI
+
+private enum InfoTab: String, CaseIterable {
+ case preference = "Preference"
+ case publish = "Publish"
+}
+
+struct InfoGuideView: View {
+ @Binding var showingInfo: Bool
+ @State private var selectedTab: InfoTab = .preference
+
+ var body: some View {
+ NavigationView {
+ VStack(spacing: 0) {
+ Picker("", selection: $selectedTab) {
+ ForEach(InfoTab.allCases, id: \.self) { tab in
+ Text(tab.rawValue).tag(tab)
+ }
+ }
+ .pickerStyle(.segmented)
+ .padding()
+ .padding(.top, 8)
+
+ TabView(selection: $selectedTab) {
+ PreferenceGuideList()
+ .tag(InfoTab.preference)
+ PublishGuideList()
+ .tag(InfoTab.publish)
+ }
+ .tabViewStyle(.page(indexDisplayMode: .never))
+ }
+ .navigationTitle("Help")
+ .navigationBarTitleDisplayMode(.inline)
+ .toolbar {
+ ToolbarItem(placement: .navigationBarTrailing) {
+ Button("Done") { showingInfo = false }
+ }
+ }
+ }
+ }
+}
+
+private struct PreferenceGuideList: View {
+ var body: some View {
+ List {
+ Section("Stream Settings") {
+ GuideRow(title: "URL", description: "RTMP server address (e.g., rtmp://your-server.com/live)")
+ GuideRow(title: "Stream Name", description: "Unique stream key provided by your streaming platform")
+ }
+ Section("Audio Settings") {
+ GuideRow(title: "Format", description: "AAC: Universal compatibility\nOpus: Better quality at low bitrates")
+ }
+ Section("Video Settings") {
+ GuideRow(title: "HDR Video", description: "Captures wider color/brightness range. Requires HDR-capable camera.")
+ GuideRow(title: "Low Latency", description: "Reduces stream delay to ~2-3 seconds. May slightly reduce quality.")
+ GuideRow(title: "BitRate Mode", description: "Average: Consistent file size\nConstant: Stable quality\nVariable: Best quality")
+ }
+ Section("Capture Settings") {
+ GuideRow(title: "Preview Type", description: "Metal: Fast GPU-based preview.\nSystem PiP: Enables background streaming. When you switch apps, receive a phone call, or go to the home screen, your stream continues in a floating window instead of stopping.")
+ GuideRow(title: "Audio Capture", description: "AudioEngine: Most stable\nAudioSource: Direct capture\nStereo: For external mics")
+ GuideRow(title: "GPU Rendering", description: "Uses GPU for video effects. Disable if experiencing issues.")
+ }
+ Section("Debug") {
+ GuideRow(title: "Memory Release Test", description: "Opens PublishView in a sheet so you can verify that its memory is released when the sheet is dismissed, which helps detect memory leaks.")
+ }
+ }
+ }
+}
+
+private struct PublishGuideList: View {
+ var body: some View {
+ List {
+ Section("Stream Settings") {
+ GuideRowWithIcon(icon: "15", isText: true, title: "FPS",
+ description: "Frames per second. 15 saves battery, 30 is standard, 60 is ultra-smooth.")
+ GuideRowWithIcon(icon: "slider.horizontal.3", title: "Bitrate (kbps)",
+ description: "Video quality. Higher = better but more data. 1500-2500 recommended.")
+ GuideRowWithIcon(icon: "rectangle.badge.checkmark", title: "720p",
+ description: "Video resolution (1280×720). Good balance of quality and performance.")
+ }
+ Section("Controls") {
+ GuideRowWithIcon(icon: "record.circle", title: "Record",
+ description: "Save a local copy to Photos. Only available while streaming.")
+ GuideRowWithIcon(icon: "mic.fill", title: "Mute",
+ description: "Mute/unmute microphone. Red when muted.")
+ GuideRowWithIcon(icon: "arrow.triangle.2.circlepath.camera", title: "Flip Camera",
+ description: "Switch between front and back cameras.")
+ GuideRowWithIcon(icon: "flashlight.on.fill", title: "Torch",
+ description: "Toggle flashlight. Only works with back camera.")
+ GuideRowWithIcon(icon: "rectangle.on.rectangle", title: "Dual Camera",
+ description: "Overlay the other camera in your stream. Viewers see both cameras.")
+ }
+ Section("Live Stats") {
+ GuideRowWithIcon(icon: "arrow.up", title: "Upload Speed",
+ description: "Current upload rate in KB/s. The graph shows last 60 seconds.")
+ GuideRowWithIcon(icon: "thermometer.medium", title: "Temperature",
+ description: "Device thermal state. Lower FPS/bitrate if too hot.")
+ }
+ }
+ }
+}
+
+private struct GuideRow: View {
+ let title: String
+ let description: String
+
+ var body: some View {
+ VStack(alignment: .leading, spacing: 4) {
+ Text(title).font(.headline)
+ Text(description)
+ .font(.caption)
+ .foregroundColor(.secondary)
+ }
+ .padding(.vertical, 2)
+ }
+}
+
+private struct GuideRowWithIcon: View {
+ let icon: String
+ var isText: Bool = false
+ let title: String
+ let description: String
+
+ var body: some View {
+ HStack(alignment: .top, spacing: 12) {
+ if isText {
+ Text(icon)
+ .font(.system(size: 14, weight: .bold))
+ .foregroundColor(.cyan)
+ .frame(width: 28, height: 28)
+ .background(Color.cyan.opacity(0.2))
+ .cornerRadius(6)
+ } else {
+ Image(systemName: icon)
+ .font(.system(size: 16))
+ .foregroundColor(.cyan)
+ .frame(width: 28, height: 28)
+ .background(Color.cyan.opacity(0.2))
+ .cornerRadius(6)
+ }
+ VStack(alignment: .leading, spacing: 2) {
+ Text(title)
+ .font(.subheadline.weight(.medium))
+ Text(description)
+ .font(.caption)
+ .foregroundColor(.secondary)
+ }
+ }
+ .padding(.vertical, 4)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/iOS/PlaybackView.swift b/Vendor/HaishinKit.swift/Examples/iOS/PlaybackView.swift
new file mode 100644
index 000000000..05704311b
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/iOS/PlaybackView.swift
@@ -0,0 +1,112 @@
+import AVFoundation
+import HaishinKit
+import SwiftUI
+
+struct PlaybackView: View {
+ @EnvironmentObject var preference: PreferenceViewModel
+ @StateObject private var model = PlaybackViewModel()
+
+ var body: some View {
+ ZStack {
+ VStack {
+ switch preference.viewType {
+ case .metal:
+ MTHKViewRepresentable(previewSource: model, videoGravity: .resizeAspectFill)
+ case .pip:
+ PiPHKViewRepresentable(previewSource: model, videoGravity: .resizeAspectFill)
+ }
+ }
+
+ VStack {
+ Spacer()
+
+ if model.hasError {
+ VStack(spacing: 16) {
+ Image(systemName: "tv.slash")
+ .font(.system(size: 48))
+ .foregroundColor(.white.opacity(0.7))
+
+ Text("Can't connect to stream")
+ .font(.headline)
+ .foregroundColor(.white)
+
+ Text(model.friendlyErrorMessage)
+ .font(.subheadline)
+ .foregroundColor(.white.opacity(0.8))
+ .multilineTextAlignment(.center)
+ .padding(.horizontal, 32)
+
+ Button(action: {
+ model.dismissError()
+ }) {
+ Text("Try Again")
+ .font(.subheadline.bold())
+ .foregroundColor(.white)
+ .padding(.horizontal, 24)
+ .padding(.vertical, 12)
+ .background(Color.blue)
+ .cornerRadius(8)
+ }
+ }
+ .padding(24)
+ .background(Color.black.opacity(0.8))
+ .cornerRadius(16)
+ }
+
+ Spacer()
+
+ HStack {
+ Spacer()
+ switch model.readyState {
+ case .connecting:
+ ProgressView()
+ .progressViewStyle(CircularProgressViewStyle(tint: .white))
+ .scaleEffect(1.5)
+ .frame(width: 64, height: 64)
+ .background(Color.black.opacity(0.5))
+ .cornerRadius(32)
+ .padding(16)
+ case .open:
+ Button(action: {
+ Task {
+ await model.stop()
+ }
+ }) {
+ Image(systemName: "stop.fill")
+ .foregroundColor(.white)
+ .font(.system(size: 24))
+ }
+ .frame(width: 64, height: 64)
+ .background(Color.red)
+ .cornerRadius(32)
+ .padding(16)
+ case .closed, .closing:
+ if !model.hasError {
+ Button(action: {
+ Task {
+ await model.start()
+ }
+ }) {
+ Image(systemName: "play.fill")
+ .foregroundColor(.white)
+ .font(.system(size: 24))
+ }
+ .frame(width: 64, height: 64)
+ .background(Color.blue)
+ .cornerRadius(32)
+ .padding(16)
+ }
+ }
+ }
+ }
+ }
+ .background(Color.black)
+ .task {
+ await model.makeSession()
+ }
+ }
+}
+
+#Preview {
+ PlaybackView()
+}
diff --git a/Vendor/HaishinKit.swift/Examples/iOS/PlaybackViewModel.swift b/Vendor/HaishinKit.swift/Examples/iOS/PlaybackViewModel.swift
new file mode 100644
index 000000000..dcf0ee07c
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/iOS/PlaybackViewModel.swift
@@ -0,0 +1,136 @@
+@preconcurrency import AVKit
+import Combine
+import HaishinKit
+@preconcurrency import Logboard
+import SwiftUI
+
+@MainActor
+final class PlaybackViewModel: ObservableObject {
+ @Published private(set) var readyState: SessionReadyState = .closed
+ @Published private(set) var error: Error?
+ @Published var hasError = false
+
+ var friendlyErrorMessage: String {
+ guard let error else {
+ return "Something went wrong. Please check your connection and try again."
+ }
+
+ let errorString = String(describing: error).lowercased()
+
+ if errorString.contains("unsupportedcommand") || errorString.contains("error 1") {
+ return "This server doesn't support watching streams directly. Most streaming servers (like Owncast) require you to watch via a web browser instead."
+ } else if errorString.contains("timeout") || errorString.contains("timedout") {
+ return "Connection timed out. The server may be offline or the stream URL might be incorrect."
+ } else if errorString.contains("invalidstate") {
+ return "Unable to connect. Please check that a stream is currently live."
+ } else if errorString.contains("connection") {
+ return "Couldn't reach the server. Check your internet connection and verify the stream URL in Preferences."
+ } else {
+ return "Unable to play this stream. The server may not support direct playback, or no stream is currently live."
+ }
+ }
+
+ func dismissError() {
+ hasError = false
+ error = nil
+ }
+
+ private var view: PiPHKView?
+ private var session: (any Session)?
+ private let audioPlayer = AudioPlayer(audioEngine: AVAudioEngine())
+ private var pictureInPictureController: AVPictureInPictureController?
+
+ func start() async {
+ guard let session else {
+ return
+ }
+ do {
+ try await session.connect {
+ Task { @MainActor in
+ self.hasError = true
+ }
+ }
+ } catch {
+ self.error = error
+ self.hasError = true
+ }
+ }
+
+ func stop() async {
+ do {
+ try await session?.close()
+ } catch {
+ logger.error(error)
+ }
+ }
+
+ func makeSession() async {
+ do {
+ session = try await SessionBuilderFactory.shared.make(Preference.default.makeURL())
+ .setMode(.playback)
+ .build()
+ await session?.setMaxRetryCount(0)
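+ // Retries are disabled so a failed connect surfaces as an error right
+ // away instead of reconnecting in the background.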
+ guard let session else {
+ return
+ }
+ if let view {
+ await session.stream.addOutput(view)
+ }
+ await session.stream.attachAudioPlayer(audioPlayer)
+ Task {
+ for await readyState in await session.readyState {
+ self.readyState = readyState
+ switch readyState {
+ case .open:
+ // Keep the screen awake while playback is active.
+ UIApplication.shared.isIdleTimerDisabled = true
+ default:
+ UIApplication.shared.isIdleTimerDisabled = false
+ }
+ }
+ }
+ } catch {
+ logger.error(error)
+ }
+ }
+}
+
+extension PlaybackViewModel: MTHKViewRepresentable.PreviewSource {
+ // MARK: MTHKViewRepresentable.PreviewSource
+ nonisolated func connect(to view: MTHKView) {
+ Task { @MainActor in
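+ // No-op: playback output is only attached to the PiP view via the
+ // PiPHKViewRepresentable conformance below.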
+ }
+ }
+}
+
+extension PlaybackViewModel: PiPHKViewRepresentable.PreviewSource {
+ // MARK: PiPHKSwiftUiView.PreviewSource
+ nonisolated func connect(to view: HaishinKit.PiPHKView) {
+ Task { @MainActor in
+ self.view = view
+ if pictureInPictureController == nil {
+ pictureInPictureController = AVPictureInPictureController(contentSource: .init(sampleBufferDisplayLayer: view.layer, playbackDelegate: PlaybackDelegate()))
+ }
+ }
+ }
+}
+
+final class PlaybackDelegate: NSObject, AVPictureInPictureSampleBufferPlaybackDelegate {
+ // MARK: AVPictureInPictureControllerDelegate
+ func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, setPlaying playing: Bool) {
+ }
+
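+ // Reporting an infinite duration marks the content as live, so the PiP
+ // window offers live-stream controls rather than a seekable timeline.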
+ func pictureInPictureControllerTimeRangeForPlayback(_ pictureInPictureController: AVPictureInPictureController) -> CMTimeRange {
+ return CMTimeRange(start: .zero, duration: .positiveInfinity)
+ }
+
+ func pictureInPictureControllerIsPlaybackPaused(_ pictureInPictureController: AVPictureInPictureController) -> Bool {
+ return false
+ }
+
+ func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, didTransitionToRenderSize newRenderSize: CMVideoDimensions) {
+ }
+
+ func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, skipByInterval skipInterval: CMTime, completion completionHandler: @escaping () -> Void) {
+ completionHandler()
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/iOS/PreferenceView.swift b/Vendor/HaishinKit.swift/Examples/iOS/PreferenceView.swift
new file mode 100644
index 000000000..8cb7de1da
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/iOS/PreferenceView.swift
@@ -0,0 +1,118 @@
+import HaishinKit
+import SwiftUI
+
+struct InfoRow: View {
+ let title: String
+ let info: String
+
+ var body: some View {
+ HStack {
+ Text(title)
+ Spacer()
+ Image(systemName: "info.circle")
+ .foregroundColor(.blue)
+ }
+ .contentShape(Rectangle())
+ }
+}
+
+struct PreferenceView: View {
+ @EnvironmentObject var model: PreferenceViewModel
+ @State private var showingInfo = false
+
+ var body: some View {
+ Form {
+ Section {
+ HStack {
+ Text("URL")
+ .frame(width: 80, alignment: .leading)
+ .foregroundColor(.secondary)
+ TextField(Preference.default.uri, text: $model.uri)
+ }.padding(.vertical, 4)
+ HStack {
+ Text("Name")
+ .frame(width: 80, alignment: .leading)
+ .foregroundColor(.secondary)
+ TextField(Preference.default.streamName, text: $model.streamName)
+ }.padding(.vertical, 4)
+ } header: {
+ HStack {
+ Text("Stream")
+ Spacer()
+ Button(action: { showingInfo = true }) {
+ Image(systemName: "info.circle")
+ .font(.system(size: 22))
+ .foregroundColor(.blue)
+ }
+ }
+ }
+ Section {
+ Picker("Format", selection: $model.audioFormat) {
+ ForEach(AudioCodecSettings.Format.allCases, id: \.self) { format in
+ Text(String(describing: format)).tag(format)
+ }
+ }
+ } header: {
+ Text("Audio Codec Settings")
+ } footer: {
+ Text("AAC is widely supported. Opus offers better quality at low bitrates.")
+ }
+ Section {
+ Toggle(isOn: $model.isHDREnabled) {
+ Text("HDR Video")
+ }
+ Toggle(isOn: $model.isLowLatencyRateControlEnabled) {
+ Text("Low Latency Mode")
+ }
+ Picker("BitRate Mode", selection: $model.bitRateMode) {
+ ForEach(model.bitRateModes, id: \.description) { index in
+ Text(index.description).tag(index)
+ }
+ }
+ } header: {
+ Text("Video Codec Settings")
+ } footer: {
+ Text("HDR captures wider color range. Low latency reduces delay but may affect quality. Average bitrate is recommended for most streams.")
+ }
+ Section {
+ Picker("Preview Type", selection: $model.viewType) {
+ ForEach(ViewType.allCases, id: \.self) { type in
+ Text(type.displayName).tag(type)
+ }
+ }
+ Picker("Audio Capture", selection: $model.audioCaptureMode) {
+ ForEach(AudioSourceServiceMode.allCases, id: \.self) { view in
+ Text(String(describing: view)).tag(view)
+ }
+ }
+ Toggle(isOn: $model.isGPURendererEnabled) {
+ Text("GPU Rendering")
+ }
+ } header: {
+ Text("Capture Settings")
+ } footer: {
+ Text("Metal preview is faster. AudioEngine mode is recommended for stability.")
+ }
+ Section {
+ Button(action: {
+ model.showPublishSheet.toggle()
+ }, label: {
+ Text("Memory release test for PublishView")
+ }).sheet(isPresented: $model.showPublishSheet, content: {
+ PublishView()
+ })
+ } header: {
+ Text("Debug")
+ }
+ }
+ #if os(iOS)
+ .sheet(isPresented: $showingInfo) {
+ InfoGuideView(showingInfo: $showingInfo)
+ }
+ #endif
+ }
+}
+
+#Preview {
+ PreferenceView().environmentObject(PreferenceViewModel())
+}
diff --git a/Vendor/HaishinKit.swift/Examples/iOS/PreferenceViewModel.swift b/Vendor/HaishinKit.swift/Examples/iOS/PreferenceViewModel.swift
new file mode 100644
index 000000000..3356ddd1c
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/iOS/PreferenceViewModel.swift
@@ -0,0 +1,173 @@
+import Combine
+import HaishinKit
+import SwiftUI
+
+enum ViewType: String, CaseIterable, Identifiable {
+ case metal
+ case pip
+
+ var id: Self { self }
+
+ var displayName: String {
+ switch self {
+ case .metal: return "Metal"
+ case .pip: return "System PiP"
+ }
+ }
+}
+
+enum AudioSourceServiceMode: String, CaseIterable, Sendable {
+ case audioSource
+ case audioSourceWithStereo
+ case audioEngine
+}
+
+@MainActor
+final class PreferenceViewModel: ObservableObject {
+ private enum Keys {
+ static let uri = "pref_stream_uri"
+ static let streamName = "pref_stream_name"
+ static let audioFormat = "pref_audio_format"
+ static let bitRateMode = "pref_bitrate_mode"
+ static let isLowLatencyEnabled = "pref_low_latency"
+ static let viewType = "pref_view_type"
+ static let isGPURendererEnabled = "pref_gpu_renderer"
+ static let audioCaptureMode = "pref_audio_capture_mode"
+ static let isDualCameraEnabled = "pref_dual_camera"
+ static let isHDREnabled = "pref_hdr_enabled"
+ }
+
+ @Published var showPublishSheet: Bool = false
+
+ @Published var uri: String {
+ didSet {
+ UserDefaults.standard.set(uri, forKey: Keys.uri)
+ }
+ }
+ @Published var streamName: String {
+ didSet {
+ UserDefaults.standard.set(streamName, forKey: Keys.streamName)
+ }
+ }
+
+ private(set) var bitRateModes: [VideoCodecSettings.BitRateMode] = [.average]
+
+ // MARK: - AudioCodecSettings.
+ @Published var audioFormat: AudioCodecSettings.Format = .aac {
+ didSet {
+ UserDefaults.standard.set(audioFormat.rawValue, forKey: Keys.audioFormat)
+ }
+ }
+
+ // MARK: - VideoCodecSettings.
+ @Published var bitRateMode: VideoCodecSettings.BitRateMode = .average {
+ didSet {
+ UserDefaults.standard.set(bitRateMode.description, forKey: Keys.bitRateMode)
+ }
+ }
+ @Published var isLowLatencyRateControlEnabled: Bool = false {
+ didSet {
+ UserDefaults.standard.set(isLowLatencyRateControlEnabled, forKey: Keys.isLowLatencyEnabled)
+ }
+ }
+
+ // MARK: - Others
+ @Published var viewType: ViewType = .metal {
+ didSet {
+ UserDefaults.standard.set(viewType.rawValue, forKey: Keys.viewType)
+ }
+ }
+ @Published var isGPURendererEnabled: Bool = true {
+ didSet {
+ UserDefaults.standard.set(isGPURendererEnabled, forKey: Keys.isGPURendererEnabled)
+ }
+ }
+ @Published var audioCaptureMode: AudioSourceServiceMode = .audioEngine {
+ didSet {
+ UserDefaults.standard.set(audioCaptureMode.rawValue, forKey: Keys.audioCaptureMode)
+ }
+ }
+ @Published var isDualCameraEnabled: Bool = true {
+ didSet {
+ UserDefaults.standard.set(isDualCameraEnabled, forKey: Keys.isDualCameraEnabled)
+ }
+ }
+ @Published var isHDREnabled: Bool = false {
+ didSet {
+ UserDefaults.standard.set(isHDREnabled, forKey: Keys.isHDREnabled)
+ }
+ }
+
+ init() {
+ let defaults = UserDefaults.standard
+
+ self.uri = defaults.string(forKey: Keys.uri) ?? Preference.default.uri
+ self.streamName = defaults.string(forKey: Keys.streamName) ?? Preference.default.streamName
+
+ if let rawValue = defaults.string(forKey: Keys.audioFormat),
+ let format = AudioCodecSettings.Format(rawValue: rawValue) {
+ self.audioFormat = format
+ }
+
+ if let savedMode = defaults.string(forKey: Keys.bitRateMode) {
+ if savedMode == VideoCodecSettings.BitRateMode.average.description {
+ self.bitRateMode = .average
+ } else if #available(iOS 16.0, tvOS 16.0, *), savedMode == VideoCodecSettings.BitRateMode.constant.description {
+ self.bitRateMode = .constant
+ }
+ }
+
+ if defaults.object(forKey: Keys.isLowLatencyEnabled) != nil {
+ self.isLowLatencyRateControlEnabled = defaults.bool(forKey: Keys.isLowLatencyEnabled)
+ }
+
+ if let rawValue = defaults.string(forKey: Keys.viewType),
+ let type = ViewType(rawValue: rawValue) {
+ self.viewType = type
+ }
+
+ if defaults.object(forKey: Keys.isGPURendererEnabled) != nil {
+ self.isGPURendererEnabled = defaults.bool(forKey: Keys.isGPURendererEnabled)
+ }
+
+ if let rawValue = defaults.string(forKey: Keys.audioCaptureMode),
+ let mode = AudioSourceServiceMode(rawValue: rawValue) {
+ self.audioCaptureMode = mode
+ }
+
+ if defaults.object(forKey: Keys.isDualCameraEnabled) != nil {
+ self.isDualCameraEnabled = defaults.bool(forKey: Keys.isDualCameraEnabled)
+ }
+
+ if defaults.object(forKey: Keys.isHDREnabled) != nil {
+ self.isHDREnabled = defaults.bool(forKey: Keys.isHDREnabled)
+ }
+
+ if #available(iOS 16.0, tvOS 16.0, *) {
+ bitRateModes.append(.constant)
+ }
+ if #available(iOS 26.0, tvOS 26.0, macOS 26.0, *) {
+ bitRateModes.append(.variable)
+ }
+ }
+
+ func makeVideoCodecSettings(_ settings: VideoCodecSettings) -> VideoCodecSettings {
+ var newSettings = settings
+ newSettings.bitRateMode = bitRateMode
+ newSettings.isLowLatencyRateControlEnabled = isLowLatencyRateControlEnabled
+ return newSettings
+ }
+
+ func makeAudioCodecSettings(_ settings: AudioCodecSettings) -> AudioCodecSettings {
+ var newSettings = settings
+ newSettings.format = audioFormat
+ return newSettings
+ }
+
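+ /// RTMP servers take the stream key as a separate path component, so it is
+ /// appended to the base URI here; other schemes (SRT, WHIP, ...) are used as-is.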
+ func makeURL() -> URL? {
+ if uri.contains("rtmp://") {
+ return URL(string: uri + "/" + streamName)
+ }
+ return URL(string: uri)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/iOS/PublishView.swift b/Vendor/HaishinKit.swift/Examples/iOS/PublishView.swift
new file mode 100644
index 000000000..3d6c30b8b
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/iOS/PublishView.swift
@@ -0,0 +1,619 @@
+import AVFoundation
+import Charts
+import HaishinKit
+import SwiftUI
+
+enum FPS: String, CaseIterable, Identifiable {
+ case fps15 = "15"
+ case fps30 = "30"
+ case fps60 = "60"
+
+ var frameRate: Float64 {
+ switch self {
+ case .fps15:
+ return 15
+ case .fps30:
+ return 30
+ case .fps60:
+ return 60
+ }
+ }
+
+ var id: Self { self }
+}
+
+private func bitrateQuality(_ kbps: Double) -> String {
+ switch kbps {
+ case ..<1000: return "Low"
+ case 1000..<1500: return "SD"
+ case 1500..<2500: return "HD"
+ case 2500..<3500: return "High"
+ default: return "Ultra"
+ }
+}
+
+enum VideoEffectItem: String, CaseIterable, Identifiable, Sendable {
+ case none
+ case monochrome
+ case warm
+ case vivid
+
+ var id: Self { self }
+
+ var displayName: String {
+ switch self {
+ case .none: return "Normal"
+ case .monochrome: return "B&W"
+ case .warm: return "Warm"
+ case .vivid: return "Vivid"
+ }
+ }
+
+ func makeVideoEffect() -> VideoEffect? {
+ switch self {
+ case .none:
+ return nil
+ case .monochrome:
+ return MonochromeEffect()
+ case .warm:
+ return WarmEffect()
+ case .vivid:
+ return VividEffect()
+ }
+ }
+}
+
+struct StreamButton: View {
+ let readyState: SessionReadyState
+ let onStart: () -> Void
+ let onStop: () -> Void
+
+ @State private var isPulsing = false
+ @State private var countdown = 3
+ @State private var countdownTimer: Timer?
+
+ var body: some View {
+ Button(action: {
+ switch readyState {
+ case .closed:
+ onStart()
+ case .open:
+ onStop()
+ default:
+ break
+ }
+ }) {
+ ZStack {
+ if readyState == .open {
+ Circle()
+ .stroke(Color.red.opacity(0.5), lineWidth: 3)
+ .frame(width: 76, height: 76)
+ .scaleEffect(isPulsing ? 1.2 : 1.0)
+ .opacity(isPulsing ? 0 : 0.8)
+ .animation(
+ .easeInOut(duration: 1.0).repeatForever(autoreverses: false),
+ value: isPulsing
+ )
+ }
+
+ Circle()
+ .fill(buttonBackground)
+ .frame(width: 70, height: 70)
+ .shadow(color: shadowColor, radius: 8, x: 0, y: 4)
+
+ VStack(spacing: 2) {
+ switch readyState {
+ case .connecting:
+ Text("\(countdown)")
+ .font(.system(size: 28, weight: .bold))
+ .foregroundColor(.white)
+ case .closing:
+ Text("...")
+ .font(.system(size: 20, weight: .bold))
+ .foregroundColor(.white)
+ case .open:
+ Image(systemName: "stop.fill")
+ .font(.system(size: 18, weight: .bold))
+ .foregroundColor(.white)
+ Text("END")
+ .font(.system(size: 10, weight: .bold))
+ .foregroundColor(.white)
+ case .closed:
+ Image(systemName: "dot.radiowaves.left.and.right")
+ .font(.system(size: 20, weight: .semibold))
+ .foregroundColor(.white)
+ Text("GO LIVE")
+ .font(.system(size: 9, weight: .bold))
+ .foregroundColor(.white)
+ }
+ }
+ }
+ }
+ .disabled(readyState == .connecting || readyState == .closing)
+ .onAppear {
+ if readyState == .open {
+ isPulsing = true
+ }
+ }
+ .onChange(of: readyState) { newState in
+ isPulsing = (newState == .open)
+ if newState == .connecting {
+ startCountdown()
+ } else {
+ stopCountdown()
+ }
+ }
+ .onDisappear {
+ stopCountdown()
+ }
+ }
+
+ private func startCountdown() {
+ countdown = 3
+ countdownTimer?.invalidate()
+ countdownTimer = Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true) { _ in
+ if countdown > 1 {
+ countdown -= 1
+ }
+ }
+ }
+
+ private func stopCountdown() {
+ countdownTimer?.invalidate()
+ countdownTimer = nil
+ countdown = 3
+ }
+
+ private var buttonBackground: LinearGradient {
+ switch readyState {
+ case .open:
+ return LinearGradient(
+ colors: [Color.red, Color.red.opacity(0.8)],
+ startPoint: .top,
+ endPoint: .bottom
+ )
+ case .connecting, .closing:
+ return LinearGradient(
+ colors: [Color.orange, Color.orange.opacity(0.8)],
+ startPoint: .top,
+ endPoint: .bottom
+ )
+ case .closed:
+ return LinearGradient(
+ colors: [Color.green, Color.green.opacity(0.7)],
+ startPoint: .top,
+ endPoint: .bottom
+ )
+ }
+ }
+
+ private var shadowColor: Color {
+ switch readyState {
+ case .open:
+ return Color.red.opacity(0.5)
+ case .connecting, .closing:
+ return Color.orange.opacity(0.5)
+ case .closed:
+ return Color.green.opacity(0.5)
+ }
+ }
+}
+
+private func formatDuration(_ duration: TimeInterval) -> String {
+ let hours = Int(duration) / 3600
+ let minutes = (Int(duration) % 3600) / 60
+ let seconds = Int(duration) % 60
+ if hours > 0 {
+ return String(format: "%d:%02d:%02d", hours, minutes, seconds)
+ }
+ return String(format: "%d:%02d", minutes, seconds)
+}
+
+private func thermalStateText(_ state: ProcessInfo.ThermalState) -> String {
+ switch state {
+ case .nominal: return "Cool"
+ case .fair: return "Warm"
+ case .serious: return "Hot"
+ case .critical: return "Critical"
+ @unknown default: return "Unknown"
+ }
+}
+
+private func thermalStateColor(_ state: ProcessInfo.ThermalState) -> Color {
+ switch state {
+ case .nominal: return .green
+ case .fair: return .yellow
+ case .serious: return .orange
+ case .critical: return .red
+ @unknown default: return .white
+ }
+}
+
+struct StatusBadge: View {
+ let text: String
+ let color: Color
+ var textColor: Color = .white
+
+ var body: some View {
+ Text(text)
+ .font(.system(size: 10, weight: .bold))
+ .foregroundColor(textColor)
+ .padding(.horizontal, 6)
+ .padding(.vertical, 3)
+ .background(color)
+ .cornerRadius(4)
+ }
+}
+
+struct SmallIconButton: View {
+ let icon: String
+ let color: Color
+ let action: () -> Void
+
+ var body: some View {
+ Button(action: action) {
+ Image(systemName: icon)
+ .font(.system(size: 20))
+ .foregroundColor(color)
+ .frame(width: 44, height: 44)
+ .background(Color.black.opacity(0.3))
+ .cornerRadius(22)
+ }
+ }
+}
+
+struct PublishView: View {
+ @Environment(\.horizontalSizeClass) private var horizontalSizeClass
+ @EnvironmentObject var preference: PreferenceViewModel
+ @StateObject private var model = PublishViewModel()
+ @State private var showFilterHint = true
+ @State private var showFilterChange = false
+ @State private var filterChangeId = 0
+
+ var body: some View {
+ ZStack {
+ VStack {
+ switch preference.viewType {
+ case .metal:
+ MTHKViewRepresentable(previewSource: model, videoGravity: .resizeAspectFill)
+ case .pip:
+ PiPHKViewRepresentable(previewSource: model, videoGravity: .resizeAspectFill)
+ }
+ }
+
+ if model.isLoading {
+ Color.black.opacity(0.6)
+ .ignoresSafeArea()
+ VStack(spacing: 16) {
+ ProgressView()
+ .progressViewStyle(CircularProgressViewStyle(tint: .white))
+ .scaleEffect(1.5)
+ Text("Loading Camera...")
+ .font(.headline)
+ .foregroundColor(.white)
+ }
+ }
+
+ if showFilterHint && !model.isLoading {
+ VStack(spacing: 10) {
+ VStack(spacing: 8) {
+ HStack(spacing: 16) {
+ Image(systemName: "chevron.left")
+ Text(model.visualEffectItem.displayName)
+ .font(.system(size: 14, weight: .medium))
+ Image(systemName: "chevron.right")
+ }
+ Text("Swipe to change filter")
+ .font(.system(size: 11))
+ .foregroundColor(.white.opacity(0.7))
+ }
+ .foregroundColor(.white)
+ .padding(.horizontal, 20)
+ .padding(.vertical, 12)
+ .background(Color.black.opacity(0.35))
+ .cornerRadius(12)
+ HStack(spacing: 6) {
+ ForEach(VideoEffectItem.allCases) { effect in
+ Circle()
+ .fill(effect == model.visualEffectItem ? Color.white : Color.white.opacity(0.4))
+ .frame(width: 6, height: 6)
+ }
+ }
+ }
+ .transition(.opacity)
+ .onAppear {
+ DispatchQueue.main.asyncAfter(deadline: .now() + 3) {
+ withAnimation(.easeOut(duration: 0.5)) {
+ showFilterHint = false
+ }
+ }
+ }
+ }
+
+ if !showFilterHint {
+ VStack(spacing: 10) {
+ Text(model.visualEffectItem.displayName)
+ .font(.system(size: 18, weight: .semibold))
+ .foregroundColor(.white)
+ .padding(.horizontal, 24)
+ .padding(.vertical, 14)
+ .background(Color.black.opacity(0.35))
+ .cornerRadius(12)
+ HStack(spacing: 6) {
+ ForEach(VideoEffectItem.allCases) { effect in
+ Circle()
+ .fill(effect == model.visualEffectItem ? Color.white : Color.white.opacity(0.4))
+ .frame(width: 6, height: 6)
+ }
+ }
+ }
+ .opacity(showFilterChange ? 1 : 0)
+ .animation(.easeOut(duration: 0.3), value: showFilterChange)
+ }
+
+ VStack(spacing: 0) {
+ HStack(alignment: .top) {
+ VStack(alignment: .leading, spacing: 8) {
+ if model.readyState == .open {
+ HStack(spacing: 6) {
+ Circle()
+ .fill(Color.red)
+ .frame(width: 10, height: 10)
+ Text(formatDuration(model.streamDuration))
+ .font(.system(size: 16, weight: .bold, design: .monospaced))
+ .foregroundColor(.white)
+ }
+ .padding(.horizontal, 10)
+ .padding(.vertical, 6)
+ .background(Color.black.opacity(0.6))
+ .cornerRadius(8)
+ }
+
+ if !model.isLoading {
+ Text("720p")
+ .font(.system(size: 10, weight: .medium))
+ .foregroundColor(.white)
+ .padding(.horizontal, 8)
+ .padding(.vertical, 4)
+ .background(Color.black.opacity(0.6))
+ .cornerRadius(4)
+ }
+
+ if !model.audioSources.isEmpty {
+ Picker("AudioSource", selection: $model.audioSource) {
+ ForEach(model.audioSources, id: \.description) { source in
+ Text(source.description).tag(source)
+ }
+ }
+ .frame(width: 180)
+ .background(Color.black.opacity(0.4))
+ .cornerRadius(8)
+ }
+ }
+
+ Spacer()
+
+ VStack(alignment: .trailing, spacing: 8) {
+ HStack(spacing: 6) {
+ if model.readyState == .open {
+ StatusBadge(text: "LIVE", color: .red)
+ }
+ if model.isRecording {
+ StatusBadge(text: "REC", color: .orange)
+ }
+ if preference.isHDREnabled {
+ StatusBadge(text: "HDR", color: .purple)
+ }
+ if model.isAudioMuted {
+ StatusBadge(text: "MUTED", color: .gray)
+ }
+ if model.isTorchEnabled {
+ StatusBadge(text: "TORCH", color: .yellow, textColor: .black)
+ }
+ if model.visualEffectItem != .none {
+ StatusBadge(text: model.visualEffectItem.displayName.uppercased(), color: .cyan)
+ }
+ }
+
+ if model.isVolumeOn {
+ Text("Volume up causes echo")
+ .font(.system(size: 10))
+ .foregroundColor(.white)
+ .padding(.horizontal, 6)
+ .padding(.vertical, 3)
+ .background(Color.red.opacity(0.8))
+ .cornerRadius(4)
+ }
+ }
+ }
+ .padding(16)
+
+ Spacer()
+
+ VStack(spacing: 10) {
+ if model.readyState == .open && !model.stats.isEmpty {
+ HStack(spacing: 8) {
+ HStack(spacing: 3) {
+ Image(systemName: "arrow.up")
+ .font(.system(size: 9, weight: .bold))
+ Text("\(model.currentUploadKBps)")
+ .font(.system(size: 11, weight: .bold, design: .monospaced))
+ Text("KB/s")
+ .font(.system(size: 8))
+ .foregroundColor(.white.opacity(0.6))
+ }
+
+ Chart(model.stats) {
+ LineMark(
+ x: .value("time", $0.date),
+ y: .value("bytes", $0.currentBytesOutPerSecond)
+ )
+ .foregroundStyle(Color.cyan)
+ .lineStyle(StrokeStyle(lineWidth: 1.5))
+ }
+ .chartYAxis(.hidden)
+ .chartXAxis(.hidden)
+ .frame(height: 28)
+
+ HStack(spacing: 3) {
+ Image(systemName: "thermometer.medium")
+ .font(.system(size: 9))
+ Text(thermalStateText(model.thermalState))
+ .font(.system(size: 10, weight: .medium))
+ .foregroundColor(thermalStateColor(model.thermalState))
+ }
+ }
+ .foregroundColor(.white)
+ .padding(.horizontal, 10)
+ .padding(.vertical, 6)
+ .background(Color.black.opacity(0.4))
+ .cornerRadius(8)
+ }
+
+ HStack(spacing: 0) {
+ HStack(spacing: 4) {
+ ForEach(FPS.allCases) { fps in
+ Button(action: {
+ model.currentFPS = fps
+ model.setFrameRate(fps.frameRate)
+ }) {
+ Text(fps.rawValue)
+ .font(.system(size: 15, weight: .semibold))
+ .foregroundColor(model.currentFPS == fps ? .white : .white.opacity(0.5))
+ .frame(width: 44, height: 44)
+ .background(model.currentFPS == fps ? Color.white.opacity(0.25) : Color.black.opacity(0.3))
+ .cornerRadius(22)
+ }
+ }
+ }
+
+ Spacer()
+
+ HStack(spacing: 6) {
+ SmallIconButton(icon: model.isRecording ? "record.circle.fill" : "record.circle",
+ color: model.isRecording ? .orange : .white) {
+ model.toggleRecording()
+ }
+ .disabled(model.readyState != .open)
+ .opacity(model.readyState == .open ? 1.0 : 0.4)
+
+ SmallIconButton(icon: model.isAudioMuted ? "mic.slash.fill" : "mic.fill",
+ color: model.isAudioMuted ? .red : .white) {
+ model.toggleAudioMuted()
+ }
+
+ SmallIconButton(icon: "arrow.triangle.2.circlepath.camera",
+ color: .white) {
+ model.flipCamera()
+ }
+
+ SmallIconButton(icon: model.isTorchEnabled ? "flashlight.on.fill" : "flashlight.off.fill",
+ color: model.isTorchEnabled ? .yellow : .white) {
+ model.toggleTorch()
+ }
+ .disabled(model.currentCamera == "Front")
+ .opacity(model.currentCamera == "Front" ? 0.4 : 1.0)
+
+ SmallIconButton(icon: model.isDualCameraEnabled ? "rectangle.on.rectangle.fill" : "rectangle.on.rectangle",
+ color: model.isDualCameraEnabled ? .cyan : .white) {
+ model.toggleDualCamera()
+ }
+ }
+ }
+
+ HStack(spacing: 12) {
+ VStack(alignment: .leading, spacing: 2) {
+ HStack(spacing: 4) {
+ Text("\(Int(model.videoBitRates))")
+ .font(.system(size: 12, weight: .semibold, design: .monospaced))
+ .foregroundColor(.white)
+ Text("kbps")
+ .font(.system(size: 9))
+ .foregroundColor(.white.opacity(0.5))
+ Text("•")
+ .foregroundColor(.white.opacity(0.3))
+ Text(bitrateQuality(model.videoBitRates))
+ .font(.system(size: 9, weight: .semibold))
+ .foregroundColor(.cyan)
+ }
+ Slider(value: $model.videoBitRates, in: 500...4000, step: 100)
+ .tint(.cyan)
+ }
+
+ StreamButton(
+ readyState: model.readyState,
+ onStart: { model.showPreLiveDialog = true },
+ onStop: { model.stopPublishing() }
+ )
+ .confirmationDialog("Ready to Go Live?", isPresented: $model.showPreLiveDialog, titleVisibility: .visible) {
+ Button("Go Live with Recording") {
+ model.startPublishing(preference, withRecording: true)
+ }
+ Button("Go Live without Recording") {
+ model.startPublishing(preference, withRecording: false)
+ }
+ Button("Cancel", role: .cancel) { }
+ } message: {
+ Text("Recording saves a copy of your stream to Photos at \(Int(model.videoBitRates)) kbps.")
+ }
+ }
+ }
+ .padding(.horizontal, 16)
+ .padding(.bottom, 16)
+ .background(
+ LinearGradient(
+ colors: [.clear, .black.opacity(0.25), .black.opacity(0.5)],
+ startPoint: .top,
+ endPoint: .bottom
+ )
+ )
+ }
+ }
+ .onAppear {
+ model.startRunning(preference)
+ }
+ .onDisappear {
+ model.stopRunning()
+ }
+ .onChange(of: horizontalSizeClass) { _ in
+ model.orientationDidChange()
+ }.alert(isPresented: $model.isShowError) {
+ Alert(
+ title: Text("Error"),
+ message: Text(String(describing: model.error)),
+ dismissButton: .default(Text("OK"))
+ )
+ }
+ .gesture(
+ DragGesture(minimumDistance: 50)
+ .onEnded { value in
+ if abs(value.translation.width) > abs(value.translation.height) {
+ let effects = VideoEffectItem.allCases
+ guard let currentIndex = effects.firstIndex(of: model.visualEffectItem) else { return }
+ let newIndex: Int
+ if value.translation.width < 0 {
+ newIndex = (currentIndex + 1) % effects.count
+ } else {
+ newIndex = (currentIndex - 1 + effects.count) % effects.count
+ }
+ let newEffect = effects[newIndex]
+ model.visualEffectItem = newEffect
+ model.setVisualEffect(newEffect)
+ filterChangeId += 1
+ showFilterChange = true
+ let currentId = filterChangeId
+ Task {
+ try? await Task.sleep(for: .milliseconds(800))
+ if filterChangeId == currentId {
+ showFilterChange = false
+ }
+ }
+ }
+ }
+ )
+ }
+}
+
+#Preview {
+ PublishView().environmentObject(PreferenceViewModel())
+}
diff --git a/Vendor/HaishinKit.swift/Examples/iOS/PublishViewModel.swift b/Vendor/HaishinKit.swift/Examples/iOS/PublishViewModel.swift
new file mode 100644
index 000000000..436df3eb7
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/iOS/PublishViewModel.swift
@@ -0,0 +1,644 @@
+import AVFoundation
+import AVKit
+import HaishinKit
+import MediaPlayer
+import Photos
+import RTCHaishinKit
+import SwiftUI
+
+@MainActor
+final class PublishViewModel: ObservableObject {
+ private enum Keys {
+ static let currentFPS = "publish_fps"
+ static let videoBitRates = "publish_bitrate"
+ }
+
+ @Published var currentFPS: FPS = .fps30 {
+ didSet {
+ UserDefaults.standard.set(currentFPS.rawValue, forKey: Keys.currentFPS)
+ }
+ }
+ @Published var visualEffectItem: VideoEffectItem = .none
+ @Published private(set) var error: Error? {
+ didSet {
+ if error != nil {
+ isShowError = true
+ }
+ }
+ }
+ @Published var isShowError = false
+ @Published var showPreLiveDialog = false
+ @Published private(set) var isAudioMuted = false
+ @Published private(set) var isTorchEnabled = false
+ @Published private(set) var readyState: SessionReadyState = .closed
+ @Published var audioSource: AudioSource = .empty {
+ didSet {
+ guard audioSource != oldValue else {
+ return
+ }
+ selectAudioSource(audioSource)
+ }
+ }
+ @Published private(set) var audioSources: [AudioSource] = []
+ @Published private(set) var isRecording = false
+ @Published private(set) var stats: [Stats] = []
+ @Published private(set) var currentCamera: String = "Back"
+ @Published private(set) var isDualCameraEnabled: Bool = false
+ @Published private(set) var isVolumeOn: Bool = false
+ @Published private(set) var isLoading: Bool = true
+ @Published private(set) var videoDimensions: String = ""
+ @Published private(set) var batteryUsed: Float = 0
+ @Published private(set) var streamDuration: TimeInterval = 0
+ @Published private(set) var thermalState: ProcessInfo.ThermalState = .nominal
+ @Published private(set) var currentUploadKBps: Int = 0
+ private var streamStartBattery: Float = 0
+ private var streamStartTime: Date?
+ private var batteryTimer: Timer?
+ private var durationTimer: Timer?
+ @Published var videoBitRates: Double = 2000 {
+ didSet {
+ UserDefaults.standard.set(videoBitRates, forKey: Keys.videoBitRates)
+ Task {
+ guard let session else {
+ return
+ }
+ var videoSettings = await session.stream.videoSettings
+ videoSettings.bitRate = Int(videoBitRates * 1000)
+ try await session.stream.setVideoSettings(videoSettings)
+ }
+ }
+ }
+ private(set) var mixer = MediaMixer()
+ private var tasks: [Task<Void, Never>] = []
+ private var session: (any Session)?
+ private var recorder: StreamRecorder?
+ private var currentPosition: AVCaptureDevice.Position = .back
+ private var audioSourceService = AudioSourceService()
+ @ScreenActor private var videoScreenObject: VideoTrackScreenObject?
+ @ScreenActor private var currentVideoEffect: VideoEffect?
+ private var volumeObserver: NSKeyValueObservation?
+ private var mtView: MediaMixerOutput?
+ private var isMixerReady = false
+ private var pictureInPictureController: AVPictureInPictureController?
+
+ init() {
+ let defaults = UserDefaults.standard
+
+ if let rawValue = defaults.string(forKey: Keys.currentFPS),
+ let fps = FPS(rawValue: rawValue) {
+ self.currentFPS = fps
+ }
+
+ if defaults.object(forKey: Keys.videoBitRates) != nil {
+ self.videoBitRates = defaults.double(forKey: Keys.videoBitRates)
+ }
+
+ Task { @ScreenActor in
+ videoScreenObject = VideoTrackScreenObject()
+ }
+ }
+
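+ /// Attaches a recorder output, optionally starts a local recording, and then
+ /// connects the session; connection errors surface through the alert binding.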
+ func startPublishing(_ preference: PreferenceViewModel, withRecording: Bool = false) {
+ Task {
+ guard let session else {
+ return
+ }
+ stats.removeAll()
+
+ let recorder = StreamRecorder()
+ await mixer.addOutput(recorder)
+ self.recorder = recorder
+
+ if withRecording {
+ do {
+ try await recorder.startRecording()
+ isRecording = true
+ } catch {
+ self.error = error
+ logger.warn(error)
+ }
+ }
+
+ do {
+ try await session.connect {
+ Task { @MainActor in
+ self.isShowError = true
+ }
+ }
+ } catch {
+ self.error = error
+ logger.error(error)
+ }
+ }
+ }
+
+ func stopPublishing() {
+ Task {
+ if isRecording {
+ do {
+ if let videoFile = try await recorder?.stopRecording() {
+ Task.detached {
+ try await PHPhotoLibrary.shared().performChanges {
+ let creationRequest = PHAssetCreationRequest.forAsset()
+ creationRequest.addResource(with: .video, fileURL: videoFile, options: nil)
+ }
+ }
+ }
+ } catch {
+ logger.warn(error)
+ }
+ isRecording = false
+ }
+ if let recorder {
+ await mixer.removeOutput(recorder)
+ self.recorder = nil
+ }
+ do {
+ try await session?.close()
+ } catch {
+ logger.error(error)
+ }
+ }
+ }
+
+ func toggleRecording() {
+ if isRecording {
+ Task {
+ do {
+ if let videoFile = try await recorder?.stopRecording() {
+ Task.detached {
+ try await PHPhotoLibrary.shared().performChanges {
+ let creationRequest = PHAssetCreationRequest.forAsset()
+ creationRequest.addResource(with: .video, fileURL: videoFile, options: nil)
+ }
+ }
+ }
+ } catch let error as StreamRecorder.Error {
+ switch error {
+ case .failedToFinishWriting(let error):
+ self.error = error
+ if let error {
+ logger.warn(error)
+ }
+ default:
+ self.error = error
+ logger.warn(error)
+ }
+ }
+ isRecording = false
+ }
+ } else {
+ Task {
+ guard let recorder else {
+ logger.warn("Recorder not initialized")
+ return
+ }
+ do {
+ try await recorder.startRecording()
+ isRecording = true
+ } catch {
+ self.error = error
+ logger.warn(error)
+ }
+ for await error in await recorder.error {
+ switch error {
+ case .failedToAppend(let error):
+ self.error = error
+ default:
+ self.error = error
+ }
+ break
+ }
+ }
+ }
+ }
+
+ func toggleAudioMuted() {
+ Task {
+ if isAudioMuted {
+ var settings = await mixer.audioMixerSettings
+ var track = settings.tracks[0] ?? .init()
+ track.isMuted = false
+ settings.tracks[0] = track
+ await mixer.setAudioMixerSettings(settings)
+ isAudioMuted = false
+ } else {
+ var settings = await mixer.audioMixerSettings
+ var track = settings.tracks[0] ?? .init()
+ track.isMuted = true
+ settings.tracks[0] = track
+ await mixer.setAudioMixerSettings(settings)
+ isAudioMuted = true
+ }
+ }
+ }
+
+ func makeSession(_ preference: PreferenceViewModel) async {
+ do {
+ session = try await SessionBuilderFactory.shared.make(preference.makeURL())
+ .setMode(.publish)
+ .build()
+ guard let session else {
+ return
+ }
+ var videoSettings = await session.stream.videoSettings
+ videoSettings.bitRate = Int(videoBitRates * 1000)
+ try? await session.stream.setVideoSettings(videoSettings)
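+ // Report-only bitrate strategy: forwards stats to the UI chart, keeping a
+ // rolling window of the last 60 samples.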
+ await session.stream.setBitRateStrategy(StatsMonitor({ data in
+ Task { @MainActor in
+ self.stats.append(data)
+ if self.stats.count > 60 {
+ self.stats.removeFirst(self.stats.count - 60)
+ }
+ self.currentUploadKBps = data.currentBytesOutPerSecond / 1024
+ }
+ }))
+ await mixer.addOutput(session.stream)
+ tasks.append(Task {
+ for await readyState in await session.readyState {
+ self.readyState = readyState
+ switch readyState {
+ case .open:
+ // Keep the screen awake while the stream is live.
+ UIApplication.shared.isIdleTimerDisabled = true
+ self.startBatteryTracking()
+ case .closed:
+ UIApplication.shared.isIdleTimerDisabled = false
+ self.stopBatteryTracking()
+ default:
+ UIApplication.shared.isIdleTimerDisabled = false
+ }
+ }
+ })
+ } catch {
+ self.error = error
+ }
+ do {
+ if let session {
+ try await session.stream.setAudioSettings(preference.makeAudioCodecSettings(session.stream.audioSettings))
+ }
+ } catch {
+ self.error = error
+ }
+ do {
+ if let session {
+ try await session.stream.setVideoSettings(preference.makeVideoCodecSettings(session.stream.videoSettings))
+ }
+ } catch {
+ self.error = error
+ }
+ }
+
+ func startRunning(_ preference: PreferenceViewModel) {
+ isMixerReady = false
+ isDualCameraEnabled = false
+
+ let isGPURendererEnabled = preference.isGPURendererEnabled
+
+ Task {
+ tasks.forEach { $0.cancel() }
+ tasks.removeAll()
+
+ await audioSourceService.stopRunning()
+ await mixer.stopRunning()
+ try? await mixer.attachAudio(nil)
+ try? await mixer.attachVideo(nil, track: 0)
+ try? await mixer.attachVideo(nil, track: 1)
+ if let session {
+ await mixer.removeOutput(session.stream)
+ try? await session.close()
+ }
+ session = nil
+
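+ // Rebuild the mixer from scratch so re-entering the view always starts
+ // from a clean capture state.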
+ mixer = MediaMixer(captureSessionMode: .multi)
+
+ let viewType = preference.viewType
+ await mixer.configuration { session in
+ if session.isMultitaskingCameraAccessSupported && viewType == .pip {
+ session.isMultitaskingCameraAccessEnabled = true
+ logger.info("session.isMultitaskingCameraAccessEnabled")
+ }
+ }
+
+ let audioCaptureMode = preference.audioCaptureMode
+ await audioSourceService.setUp(preference.audioCaptureMode)
+ await mixer.configuration { session in
+ switch audioCaptureMode {
+ case .audioSource:
+ session.automaticallyConfiguresApplicationAudioSession = true
+ case .audioSourceWithStereo:
+ session.automaticallyConfiguresApplicationAudioSession = false
+ case .audioEngine:
+ session.automaticallyConfiguresApplicationAudioSession = true
+ }
+ }
+ await mixer.setMonitoringEnabled(DeviceUtil.isHeadphoneConnected())
+ var videoMixerSettings = await mixer.videoMixerSettings
+ videoMixerSettings.mode = .offscreen
+ await mixer.setVideoMixerSettings(videoMixerSettings)
+
+ await configureScreen(isGPURendererEnabled: isGPURendererEnabled)
+
+ let backCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)
+ let frontCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
+ try? await mixer.attachVideo(backCamera, track: 0) { videoUnit in
+ videoUnit.isVideoMirrored = false
+ }
+ try? await mixer.attachVideo(frontCamera, track: 1) { videoUnit in
+ videoUnit.isVideoMirrored = true
+ }
+ var videoMixerSettings2 = await mixer.videoMixerSettings
+ videoMixerSettings2.mainTrack = currentPosition == .front ? 1 : 0
+ await mixer.setVideoMixerSettings(videoMixerSettings2)
+ currentCamera = currentPosition == .front ? "Front" : "Back"
+ if audioCaptureMode == .audioSource {
+ try? await mixer.attachAudio(AVCaptureDevice.default(for: .audio))
+ }
+ await audioSourceService.startRunning()
+ await mixer.startRunning()
+
+ isMixerReady = true
+ if let mtView {
+ await mixer.addOutput(mtView)
+ }
+
+ do {
+ if preference.isHDREnabled {
+ try await mixer.setDynamicRangeMode(.hdr)
+ } else {
+ try await mixer.setDynamicRangeMode(.sdr)
+ }
+ } catch {
+ logger.info(error)
+ }
+ await makeSession(preference)
+ let isLandscape = await UIDevice.current.orientation.isLandscape
+ await updateVideoEncoderSize(isLandscape: isLandscape)
+ let screenSize = await mixer.screen.size
+ if let session = self.session {
+ let videoSettings = await session.stream.videoSettings
+ self.videoDimensions = "Screen: \(Int(screenSize.width))x\(Int(screenSize.height)) | Video: \(videoSettings.videoSize.width)x\(videoSettings.videoSize.height)"
+ }
+ isLoading = false
+ }
+ orientationDidChange()
+ tasks.append(Task {
+ for await buffer in await audioSourceService.buffer {
+ await mixer.append(buffer.0, when: buffer.1)
+ }
+ })
+ tasks.append(Task {
+ for await sources in await audioSourceService.sourcesUpdates() {
+ audioSources = sources
+ if let first = sources.first, audioSource == .empty {
+ audioSource = first
+ }
+ }
+ })
+ startVolumeMonitoring()
+ }
+
+ @ScreenActor
+ private func configureScreen(isGPURendererEnabled: Bool) async {
+ await mixer.screen.isGPURendererEnabled = isGPURendererEnabled
+ await mixer.screen.size = .init(width: 720, height: 1280)
+ await mixer.screen.backgroundColor = UIColor.black.cgColor
+ }
+
+ private func startVolumeMonitoring() {
+ let audioSession = AVAudioSession.sharedInstance()
+ try? audioSession.setActive(true)
+ isVolumeOn = audioSession.outputVolume > 0
+ volumeObserver = audioSession.observe(\.outputVolume, options: [.new]) { [weak self] _, change in
+ Task { @MainActor in
+ if let volume = change.newValue {
+ self?.isVolumeOn = volume > 0
+ }
+ }
+ }
+ }
+
+ private func stopVolumeMonitoring() {
+ volumeObserver?.invalidate()
+ volumeObserver = nil
+ }
+
+ func stopRunning() {
+ isMixerReady = false
+ stopVolumeMonitoring()
+ Task {
+ await audioSourceService.stopRunning()
+ await mixer.stopRunning()
+ try? await mixer.attachAudio(nil)
+ try? await mixer.attachVideo(nil, track: 0)
+ try? await mixer.attachVideo(nil, track: 1)
+ if let session {
+ await mixer.removeOutput(session.stream)
+ }
+ tasks.forEach { $0.cancel() }
+ tasks.removeAll()
+ }
+ }
+
+ func flipCamera() {
+ Task {
+ var videoMixerSettings = await mixer.videoMixerSettings
+ if videoMixerSettings.mainTrack == 0 {
+ videoMixerSettings.mainTrack = 1
+ await mixer.setVideoMixerSettings(videoMixerSettings)
+ currentPosition = .front
+ currentCamera = "Front"
+ if isTorchEnabled {
+ await mixer.setTorchEnabled(false)
+ isTorchEnabled = false
+ }
+ Task { @ScreenActor in
+ videoScreenObject?.track = 0
+ }
+ } else {
+ videoMixerSettings.mainTrack = 0
+ await mixer.setVideoMixerSettings(videoMixerSettings)
+ currentPosition = .back
+ currentCamera = "Back"
+ Task { @ScreenActor in
+ videoScreenObject?.track = 1
+ }
+ }
+ }
+ }
+
+ func setVisualEffect(_ videoEffect: VideoEffectItem) {
+ Task { @ScreenActor in
+ if let currentVideoEffect {
+ _ = await mixer.screen.unregisterVideoEffect(currentVideoEffect)
+ }
+ if let videoEffect = videoEffect.makeVideoEffect() {
+ currentVideoEffect = videoEffect
+ _ = await mixer.screen.registerVideoEffect(videoEffect)
+ }
+ }
+ }
+
+ func toggleTorch() {
+ Task {
+ await mixer.setTorchEnabled(!isTorchEnabled)
+ isTorchEnabled.toggle()
+ }
+ }
+
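+ /// Shows or hides a small overlay of the secondary camera in the top-right
+ /// corner of the offscreen-rendered screen.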
+ func toggleDualCamera() {
+ let isEnabled = isDualCameraEnabled
+ let position = currentPosition
+ Task { @ScreenActor in
+ if isEnabled {
+ if let videoScreenObject {
+ try? await mixer.screen.removeChild(videoScreenObject)
+ }
+ await MainActor.run { isDualCameraEnabled = false }
+ } else {
+ if let videoScreenObject {
+ videoScreenObject.size = .init(width: 400, height: 224)
+ videoScreenObject.cornerRadius = 8.0
+ videoScreenObject.track = position == .front ? 0 : 1
+ videoScreenObject.verticalAlignment = .top
+ videoScreenObject.horizontalAlignment = .right
+ videoScreenObject.layoutMargin = .init(top: 32, left: 0, bottom: 0, right: 32)
+ videoScreenObject.invalidateLayout()
+ try? await mixer.screen.addChild(videoScreenObject)
+ }
+ await MainActor.run { isDualCameraEnabled = true }
+ }
+ }
+ }
+
+ func setFrameRate(_ fps: Float64) {
+ Task {
+ do {
+ try? await mixer.configuration(video: 0) { video in
+ do {
+ try video.setFrameRate(fps)
+ } catch {
+ logger.error(error)
+ }
+ }
+ try? await mixer.configuration(video: 1) { video in
+ do {
+ try video.setFrameRate(fps)
+ } catch {
+ logger.error(error)
+ }
+ }
+ try await mixer.setFrameRate(fps)
+ if var videoSettings = await session?.stream.videoSettings {
+ videoSettings.expectedFrameRate = fps
+ try? await session?.stream.setVideoSettings(videoSettings)
+ }
+ } catch {
+ logger.error(error)
+ }
+ }
+ }
+
+ func orientationDidChange() {
+ Task { @ScreenActor in
+ await mixer.setVideoOrientation(.portrait)
+ await mixer.screen.size = .init(width: 720, height: 1280)
+ let screenSize = await mixer.screen.size
+ Task { @MainActor in
+ await self.updateVideoEncoderSize(isLandscape: false)
+ if let session = self.session {
+ let videoSettings = await session.stream.videoSettings
+ self.videoDimensions = "Screen: \(Int(screenSize.width))x\(Int(screenSize.height)) | Video: \(videoSettings.videoSize.width)x\(videoSettings.videoSize.height)"
+ } else {
+ self.videoDimensions = "Screen: \(Int(screenSize.width))x\(Int(screenSize.height))"
+ }
+ }
+ }
+ }
+
+ private func updateVideoEncoderSize(isLandscape: Bool) async {
+ guard let session else { return }
+ var videoSettings = await session.stream.videoSettings
+ let targetSize: CGSize = isLandscape
+ ? CGSize(width: 1280, height: 720)
+ : CGSize(width: 720, height: 1280)
+ if videoSettings.videoSize != targetSize {
+ videoSettings.videoSize = targetSize
+ try? await session.stream.setVideoSettings(videoSettings)
+ }
+ }
+
+ private func startBatteryTracking() {
+ UIDevice.current.isBatteryMonitoringEnabled = true
+ streamStartBattery = UIDevice.current.batteryLevel
+ streamStartTime = Date()
+ batteryUsed = 0
+ streamDuration = 0
+
+ durationTimer = Timer.scheduledTimer(withTimeInterval: 1, repeats: true) { [weak self] _ in
+ Task { @MainActor in
+ self?.updateDuration()
+ }
+ }
+
+ batteryTimer = Timer.scheduledTimer(withTimeInterval: 10, repeats: true) { [weak self] _ in
+ Task { @MainActor in
+ self?.updateBatteryStats()
+ }
+ }
+ }
+
+ private func stopBatteryTracking() {
+ durationTimer?.invalidate()
+ durationTimer = nil
+ batteryTimer?.invalidate()
+ batteryTimer = nil
+ updateBatteryStats()
+ }
+
+ private func updateDuration() {
+ guard let startTime = streamStartTime else { return }
+ streamDuration = Date().timeIntervalSince(startTime)
+ }
+
+ private func updateBatteryStats() {
+ let currentBattery = UIDevice.current.batteryLevel
+ if currentBattery >= 0 && streamStartBattery >= 0 {
+ batteryUsed = (streamStartBattery - currentBattery) * 100
+ }
+ thermalState = ProcessInfo.processInfo.thermalState
+ }
+
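+ /// Switching the audio source restarts capturing so the mixer picks up the
+ /// new input route.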
+ private func selectAudioSource(_ audioSource: AudioSource) {
+ Task {
+ try await audioSourceService.selectAudioSource(audioSource)
+ await mixer.stopCapturing()
+ try await mixer.attachAudio(AVCaptureDevice.default(for: .audio))
+ await mixer.startCapturing()
+ }
+ }
+}
+
+extension PublishViewModel: MTHKViewRepresentable.PreviewSource {
+ nonisolated func connect(to view: MTHKView) {
+ Task { @MainActor in
+ self.mtView = view
+ if isMixerReady {
+ await mixer.addOutput(view)
+ }
+ }
+ }
+}
+
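+/// Wires the preview into the system Picture-in-Picture pipeline; the PiP
+/// controller is created lazily from the view's sample-buffer display layer.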
+extension PublishViewModel: PiPHKViewRepresentable.PreviewSource {
+ nonisolated func connect(to view: PiPHKView) {
+ Task { @MainActor in
+ self.mtView = view
+ if isMixerReady {
+ await mixer.addOutput(view)
+ }
+ if pictureInPictureController == nil {
+ pictureInPictureController = AVPictureInPictureController(contentSource: .init(sampleBufferDisplayLayer: view.layer, playbackDelegate: PlaybackDelegate()))
+ }
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/iOS/Screencast/Info.plist b/Vendor/HaishinKit.swift/Examples/iOS/Screencast/Info.plist
new file mode 100644
index 000000000..e9367904d
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/iOS/Screencast/Info.plist
@@ -0,0 +1,15 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+    <key>NSExtension</key>
+    <dict>
+        <key>NSExtensionPointIdentifier</key>
+        <string>com.apple.broadcast-services-upload</string>
+        <key>NSExtensionPrincipalClass</key>
+        <string>$(PRODUCT_MODULE_NAME).SampleHandler</string>
+        <key>RPBroadcastProcessMode</key>
+        <string>RPBroadcastProcessModeSampleBuffer</string>
+    </dict>
+</dict>
+</plist>
diff --git a/Vendor/HaishinKit.swift/Examples/iOS/Screencast/SampleHandler.swift b/Vendor/HaishinKit.swift/Examples/iOS/Screencast/SampleHandler.swift
new file mode 100644
index 000000000..2d02ab091
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/iOS/Screencast/SampleHandler.swift
@@ -0,0 +1,100 @@
+import HaishinKit
+@preconcurrency import Logboard
+import MediaPlayer
+import ReplayKit
+import RTCHaishinKit
+import RTMPHaishinKit
+import SRTHaishinKit
+import VideoToolbox
+
+nonisolated let logger = LBLogger.with("com.haishinkit.Screencast")
+
+final class SampleHandler: RPBroadcastSampleHandler, @unchecked Sendable {
+ private var slider: UISlider?
+ private var session: Session?
+ private var mixer = MediaMixer(captureSessionMode: .manual, multiTrackAudioMixingEnabled: true)
+ private var needVideoConfiguration = true
+
+ override init() {
+ Task {
+ await SessionBuilderFactory.shared.register(RTMPSessionFactory())
+ await SessionBuilderFactory.shared.register(SRTSessionFactory())
+ await SessionBuilderFactory.shared.register(HTTPSessionFactory())
+
+ await SRTLogger.shared.setLevel(.debug)
+ await RTCLogger.shared.setLevel(.info)
+ }
+ }
+
+ override func broadcastStarted(withSetupInfo setupInfo: [String: NSObject]?) {
+ LBLogger.with(kHaishinKitIdentifier).level = .trace
+ LBLogger.with(kRTMPHaishinKitIdentifier).level = .trace
+ LBLogger.with(kSRTHaishinKitIdentifier).level = .trace
+ LBLogger.with(kRTCHaishinKitIdentifier).level = .trace
+ // mixer.audioMixerSettings.tracks[1] = .default
+ Task {
+ do {
+ session = try await SessionBuilderFactory.shared.make(Preference.default.makeURL()).build()
+ // ReplayKit is sensitive to memory, so we limit the queue to a maximum of five items.
+ var videoSetting = await mixer.videoMixerSettings
+ videoSetting.mode = .passthrough
+ await session?.stream.setVideoInputBufferCounts(5)
+ await mixer.setVideoMixerSettings(videoSetting)
+ await mixer.startRunning()
+ if let session {
+ await mixer.addOutput(session.stream)
+ try? await session.connect {
+ }
+ }
+ } catch {
+ logger.error(error)
+ }
+ }
+ // audioApp buffers keep their volume even when the device is muted, so we
+ // grab MPVolumeView's hidden slider as a hack to track the real system volume.
+ DispatchQueue.main.async {
+ let volumeView = MPVolumeView(frame: CGRect.zero)
+ if let slider = volumeView.subviews.compactMap({ $0 as? UISlider }).first {
+ self.slider = slider
+ }
+ }
+ }
+
+ override func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, with sampleBufferType: RPSampleBufferType) {
+ switch sampleBufferType {
+ case .video:
+ Task {
+ if needVideoConfiguration, let dimensions = sampleBuffer.formatDescription?.dimensions {
+ var videoSettings = await session?.stream.videoSettings
+ videoSettings?.videoSize = .init(
+ width: CGFloat(dimensions.width),
+ height: CGFloat(dimensions.height)
+ )
+ videoSettings?.profileLevel = kVTProfileLevel_H264_Baseline_AutoLevel as String
+ if let videoSettings {
+ try? await session?.stream.setVideoSettings(videoSettings)
+ }
+ needVideoConfiguration = false
+ }
+ }
+ Task { await mixer.append(sampleBuffer) }
+ case .audioMic:
+ if sampleBuffer.dataReadiness == .ready {
+ Task { await mixer.append(sampleBuffer, track: 0) }
+ }
+ case .audioApp:
+ Task { @MainActor in
+ if let volume = slider?.value {
+ var audioMixerSettings = await mixer.audioMixerSettings
+ audioMixerSettings.tracks[1] = .default
+ audioMixerSettings.tracks[1]?.volume = volume * 0.5
+ await mixer.setAudioMixerSettings(audioMixerSettings)
+ }
+ }
+ if sampleBuffer.dataReadiness == .ready {
+ Task { await mixer.append(sampleBuffer, track: 1) }
+ }
+ @unknown default:
+ break
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/iOS/StatsMonitor.swift b/Vendor/HaishinKit.swift/Examples/iOS/StatsMonitor.swift
new file mode 100644
index 000000000..91bd82785
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/iOS/StatsMonitor.swift
@@ -0,0 +1,36 @@
+import Foundation
+import HaishinKit
+
+struct Stats: Identifiable {
+ let date: Date
+ let currentBytesOutPerSecond: Int
+ let id: Int
+
+ init(report: NetworkMonitorReport) {
+ currentBytesOutPerSecond = report.currentBytesOutPerSecond
+ date = Date()
+ id = Int(date.timeIntervalSince1970)
+ }
+}
+
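+/// A pass-through StreamBitRateStrategy that never adjusts the bitrate and only
+/// forwards NetworkMonitorReport samples to a callback.
+/// Note: the "mamimum" spelling below matches the protocol requirement.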
+struct StatsMonitor: StreamBitRateStrategy {
+ let mamimumVideoBitRate: Int = 0
+ let mamimumAudioBitRate: Int = 0
+
+ private let callback: @Sendable (Stats) -> Void
+
+ init(_ callback: @Sendable @escaping (Stats) -> Void) {
+ self.callback = callback
+ }
+
+ func adjustBitrate(_ event: NetworkMonitorEvent, stream: some StreamConvertible) async {
+ switch event {
+ case .status(let report):
+ callback(Stats(report: report))
+ case .publishInsufficientBWOccured(let report):
+ callback(Stats(report: report))
+ case .reset:
+ break
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/iOS/UVCView.swift b/Vendor/HaishinKit.swift/Examples/iOS/UVCView.swift
new file mode 100644
index 000000000..c0e992e03
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/iOS/UVCView.swift
@@ -0,0 +1,100 @@
+import AVFoundation
+import Charts
+import HaishinKit
+import SwiftUI
+
+@available(iOS 17.0, *)
+struct UVCView: View {
+ @Environment(\.horizontalSizeClass) private var horizontalSizeClass
+ @EnvironmentObject var preference: PreferenceViewModel
+ @StateObject private var model = UVCViewModel()
+
+ var body: some View {
+ ZStack {
+ VStack {
+ if preference.viewType == .pip {
+ PiPHKViewRepresentable(previewSource: model)
+ } else {
+ MTHKViewRepresentable(previewSource: model)
+ }
+ }
+ VStack {
+ HStack(spacing: 16) {
+ Spacer()
+ Button(action: {
+ model.toggleRecording()
+ }, label: {
+ Image(systemName: model.isRecording ?
+ "recordingtape.circle.fill" :
+ "recordingtape.circle")
+ .resizable()
+ .scaledToFit()
+ .foregroundColor(.white)
+ .frame(width: 30, height: 30)
+ })
+ }
+ .frame(height: 50)
+ HStack {
+ Spacer()
+ Toggle(isOn: $model.isHDREnabled) {
+ Text("HDR")
+ }
+ .frame(width: 150)
+ }.frame(height: 80)
+ Spacer()
+ HStack {
+ Spacer()
+ switch model.readyState {
+ case .connecting:
+ Spacer()
+ case .open:
+ Button(action: {
+ model.stopPublishing()
+ }, label: {
+ Image(systemName: "stop.circle")
+ .foregroundColor(.white)
+ .font(.system(size: 24))
+ })
+ .frame(width: 60, height: 60)
+ .background(Color.blue)
+ .cornerRadius(30.0)
+ .padding(EdgeInsets(top: 0, leading: 0, bottom: 16.0, trailing: 16.0))
+ case .closing:
+ Spacer()
+ case .closed:
+ Button(action: {
+ model.startPublishing(preference)
+ }, label: {
+ Image(systemName: "record.circle")
+ .foregroundColor(.white)
+ .font(.system(size: 24))
+ })
+ .frame(width: 60, height: 60)
+ .background(Color.blue)
+ .cornerRadius(30.0)
+ .padding(EdgeInsets(top: 0, leading: 0, bottom: 0, trailing: 16.0))
+ }
+ }.frame(maxWidth: .infinity)
+ }
+ }
+ .onAppear {
+ model.startRunning(preference)
+ }
+ .onDisappear {
+ model.stopRunning()
+ }.alert(isPresented: $model.isShowError) {
+ Alert(
+ title: Text("Error"),
+ message: Text(String(describing: model.error)),
+ dismissButton: .default(Text("OK"))
+ )
+ }
+ }
+}
+
+#Preview {
+ if #available(iOS 17.0, *) {
+ UVCView().environmentObject(PreferenceViewModel())
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/iOS/UVCViewModel.swift b/Vendor/HaishinKit.swift/Examples/iOS/UVCViewModel.swift
new file mode 100644
index 000000000..b88d635ef
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/iOS/UVCViewModel.swift
@@ -0,0 +1,250 @@
+import AVFoundation
+import HaishinKit
+import Photos
+import RTCHaishinKit
+import SwiftUI
+
+@available(iOS 17.0, *)
+@MainActor
+final class UVCViewModel: ObservableObject {
+ @Published private(set) var error: Error? {
+ didSet {
+ if error != nil {
+ isShowError = true
+ }
+ }
+ }
+ @Published var isShowError = false
+ @Published private(set) var readyState: SessionReadyState = .closed
+ @Published private(set) var isRecording = false
+ @Published var isHDREnabled = false {
+ didSet {
+ Task {
+ do {
+ if isHDREnabled {
+ try await mixer.setDynamicRangeMode(.hdr)
+ } else {
+ try await mixer.setDynamicRangeMode(.sdr)
+ }
+ } catch {
+ logger.info(error)
+ }
+ }
+ }
+ }
+ // To use the multi-camera feature, create the MediaMixer with the multi capture-session mode:
+ // let mixer = MediaMixer(captureSessionMode: .multi)
+ private(set) var mixer = MediaMixer(captureSessionMode: .single)
+ private var tasks: [Task<Void, Never>] = []
+ private var session: (any Session)?
+ private var recorder: StreamRecorder?
+
+ init() {
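+ // Re-attach the external (UVC) camera whenever a capture device connects.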
+ NotificationCenter.default.addObserver(
+ forName: .AVCaptureDeviceWasConnected,
+ object: nil,
+ queue: .main
+ ) { [weak self] notif in // weak: the observer block would otherwise keep the view model alive
+ guard let device = notif.object as? AVCaptureDevice else { return }
+ logger.info(device)
+ self?.deviceConnected()
+ }
+ }
+
+ func startPublishing(_ preference: PreferenceViewModel) {
+ Task {
+ guard let session else {
+ return
+ }
+ do {
+ try await session.connect {
+ Task { @MainActor in
+ self.isShowError = true
+ }
+ }
+ } catch {
+ self.error = error
+ logger.error(error)
+ }
+ }
+ }
+
+ func stopPublishing() {
+ Task {
+ do {
+ try await session?.close()
+ } catch {
+ logger.error(error)
+ }
+ }
+ }
+
+ func toggleRecording() {
+ if isRecording {
+ Task {
+ do {
+ // In production, consider a recovery procedure in case moving the file to the Photo Library fails.
+ if let videoFile = try await recorder?.stopRecording() {
+ Task.detached {
+ try await PHPhotoLibrary.shared().performChanges {
+ let creationRequest = PHAssetCreationRequest.forAsset()
+ creationRequest.addResource(with: .video, fileURL: videoFile, options: nil)
+ }
+ }
+ }
+ } catch let error as StreamRecorder.Error {
+ switch error {
+ case .failedToFinishWriting(let error):
+ self.error = error
+ if let error {
+ logger.warn(error)
+ }
+ default:
+ self.error = error
+ logger.warn(error)
+ }
+ }
+ recorder = nil
+ isRecording = false
+ }
+ } else {
+ Task {
+ let recorder = StreamRecorder()
+ await mixer.addOutput(recorder)
+ do {
+ // Known issue: starting a recording while attached to Xcode freezes for about 30 seconds (iOS 26 + Xcode 26).
+ try await recorder.startRecording()
+ isRecording = true
+ self.recorder = recorder
+ } catch {
+ self.error = error
+ logger.warn(error)
+ }
+ for await error in await recorder.error {
+ switch error {
+ case .failedToAppend(let error):
+ self.error = error
+ default:
+ self.error = error
+ }
+ break
+ }
+ }
+ }
+ }
+
+ func makeSession(_ preference: PreferenceViewModel) async {
+ // Make session.
+ do {
+ session = try await SessionBuilderFactory.shared.make(preference.makeURL())
+ .setMode(.publish)
+ .build()
+ guard let session else {
+ return
+ }
+ await mixer.addOutput(session.stream)
+ tasks.append(Task {
+ for await readyState in await session.readyState {
+ self.readyState = readyState
+ switch readyState {
+ case .open:
+ // Keep the screen awake while publishing.
+ UIApplication.shared.isIdleTimerDisabled = true
+ default:
+ UIApplication.shared.isIdleTimerDisabled = false
+ }
+ }
+ })
+ } catch {
+ self.error = error
+ }
+ do {
+ if let session {
+ try await session.stream.setAudioSettings(preference.makeAudioCodecSettings(session.stream.audioSettings))
+ }
+ } catch {
+ self.error = error
+ }
+ do {
+ if let session {
+ try await session.stream.setVideoSettings(preference.makeVideoCodecSettings(session.stream.videoSettings))
+ }
+ } catch {
+ self.error = error
+ }
+ }
+
+ func startRunning(_ preference: PreferenceViewModel) {
+ let session = AVAudioSession.sharedInstance()
+ do {
+ try session.setCategory(.playAndRecord, mode: .videoRecording, options: [.defaultToSpeaker, .allowBluetoothHFP])
+ try session.setActive(true)
+ } catch {
+ logger.error(error)
+ }
+
+ Task {
+ var videoMixerSettings = await mixer.videoMixerSettings
+ videoMixerSettings.mode = .passthrough
+ await mixer.setVideoMixerSettings(videoMixerSettings)
+ // Attach devices
+ do {
+ try await mixer.attachVideo(AVCaptureDevice.default(.external, for: .video, position: .unspecified))
+ } catch {
+ logger.error(error)
+ }
+ try? await mixer.attachAudio(AVCaptureDevice.default(for: .audio))
+ await mixer.startRunning()
+ await makeSession(preference)
+ }
+ Task { @ScreenActor in
+ if await preference.isGPURendererEnabled {
+ await mixer.screen.isGPURendererEnabled = true
+ } else {
+ await mixer.screen.isGPURendererEnabled = false
+ }
+ await mixer.screen.size = .init(width: 720, height: 1280)
+ await mixer.screen.backgroundColor = UIColor.black.cgColor
+ }
+ }
+
+ func stopRunning() {
+ Task {
+ await mixer.stopRunning()
+ try? await mixer.attachAudio(nil)
+ try? await mixer.attachVideo(nil)
+ if let session {
+ await mixer.removeOutput(session.stream)
+ }
+ tasks.forEach { $0.cancel() }
+ tasks.removeAll()
+ }
+ }
+
+ private func deviceConnected() {
+ Task {
+ do {
+ try await mixer.attachVideo(AVCaptureDevice.default(.external, for: .video, position: .unspecified))
+ } catch {
+ logger.error(error)
+ }
+ }
+ }
+}
+
+@available(iOS 17.0, *)
+extension UVCViewModel: MTHKViewRepresentable.PreviewSource {
+ nonisolated func connect(to view: MTHKView) {
+ Task {
+ await mixer.addOutput(view)
+ }
+ }
+}
+
+@available(iOS 17.0, *)
+extension UVCViewModel: PiPHKViewRepresentable.PreviewSource {
+ nonisolated func connect(to view: PiPHKView) {
+ Task {
+ await mixer.addOutput(view)
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/iOS/VisualEffect.swift b/Vendor/HaishinKit.swift/Examples/iOS/VisualEffect.swift
new file mode 100644
index 000000000..e20567f67
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/iOS/VisualEffect.swift
@@ -0,0 +1,51 @@
+import AVFoundation
+import CoreImage
+import HaishinKit
+
+final class MonochromeEffect: VideoEffect {
+ let filter: CIFilter? = CIFilter(name: "CIColorMonochrome")
+
+ func execute(_ image: CIImage) -> CIImage {
+ guard let filter else {
+ return image
+ }
+ filter.setValue(image, forKey: "inputImage")
+ filter.setValue(CIColor(red: 0.75, green: 0.75, blue: 0.75), forKey: "inputColor")
+ filter.setValue(1.0, forKey: "inputIntensity")
+ return filter.outputImage ?? image
+ }
+}
+
+final class VividEffect: VideoEffect {
+ let filter: CIFilter? = CIFilter(name: "CIColorControls")
+
+ func execute(_ image: CIImage) -> CIImage {
+ guard let filter else {
+ return image
+ }
+ filter.setValue(image, forKey: "inputImage")
+ filter.setValue(1.5, forKey: "inputSaturation")
+ filter.setValue(1.15, forKey: "inputContrast")
+ return filter.outputImage ?? image
+ }
+}
+
+final class WarmEffect: VideoEffect {
+ let filter: CIFilter? = CIFilter(name: "CITemperatureAndTint")
+ let controls: CIFilter? = CIFilter(name: "CIColorControls")
+
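+ // Shifting the neutral point from 6500K toward 4000K warms the image; a
+ // slight saturation/contrast boost keeps it from looking washed out.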
+ func execute(_ image: CIImage) -> CIImage {
+ guard let filter, let controls else {
+ return image
+ }
+ filter.setValue(image, forKey: "inputImage")
+ filter.setValue(CIVector(x: 6500, y: 0), forKey: "inputNeutral")
+ filter.setValue(CIVector(x: 4000, y: 0), forKey: "inputTargetNeutral")
+ guard let warmed = filter.outputImage else { return image }
+
+ controls.setValue(warmed, forKey: "inputImage")
+ controls.setValue(1.1, forKey: "inputSaturation")
+ controls.setValue(1.05, forKey: "inputContrast")
+ return controls.outputImage ?? image
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/macOS/Assets.xcassets/AccentColor.colorset/Contents.json b/Vendor/HaishinKit.swift/Examples/macOS/Assets.xcassets/AccentColor.colorset/Contents.json
new file mode 100644
index 000000000..eb8789700
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/macOS/Assets.xcassets/AccentColor.colorset/Contents.json
@@ -0,0 +1,11 @@
+{
+ "colors" : [
+ {
+ "idiom" : "universal"
+ }
+ ],
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/macOS/Assets.xcassets/AppIcon.appiconset/AppIcon.png b/Vendor/HaishinKit.swift/Examples/macOS/Assets.xcassets/AppIcon.appiconset/AppIcon.png
new file mode 100644
index 000000000..62e3de039
Binary files /dev/null and b/Vendor/HaishinKit.swift/Examples/macOS/Assets.xcassets/AppIcon.appiconset/AppIcon.png differ
diff --git a/Vendor/HaishinKit.swift/Examples/macOS/Assets.xcassets/AppIcon.appiconset/Contents.json b/Vendor/HaishinKit.swift/Examples/macOS/Assets.xcassets/AppIcon.appiconset/Contents.json
new file mode 100644
index 000000000..bc26afc71
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/macOS/Assets.xcassets/AppIcon.appiconset/Contents.json
@@ -0,0 +1,59 @@
+{
+ "images" : [
+ {
+ "idiom" : "mac",
+ "scale" : "1x",
+ "size" : "16x16"
+ },
+ {
+ "idiom" : "mac",
+ "scale" : "2x",
+ "size" : "16x16"
+ },
+ {
+ "idiom" : "mac",
+ "scale" : "1x",
+ "size" : "32x32"
+ },
+ {
+ "idiom" : "mac",
+ "scale" : "2x",
+ "size" : "32x32"
+ },
+ {
+ "idiom" : "mac",
+ "scale" : "1x",
+ "size" : "128x128"
+ },
+ {
+ "idiom" : "mac",
+ "scale" : "2x",
+ "size" : "128x128"
+ },
+ {
+ "idiom" : "mac",
+ "scale" : "1x",
+ "size" : "256x256"
+ },
+ {
+ "idiom" : "mac",
+ "scale" : "2x",
+ "size" : "256x256"
+ },
+ {
+ "idiom" : "mac",
+ "scale" : "1x",
+ "size" : "512x512"
+ },
+ {
+ "filename" : "AppIcon.png",
+ "idiom" : "mac",
+ "scale" : "2x",
+ "size" : "512x512"
+ }
+ ],
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/macOS/Assets.xcassets/Contents.json b/Vendor/HaishinKit.swift/Examples/macOS/Assets.xcassets/Contents.json
new file mode 100644
index 000000000..73c00596a
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/macOS/Assets.xcassets/Contents.json
@@ -0,0 +1,6 @@
+{
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/macOS/ContentView.swift b/Vendor/HaishinKit.swift/Examples/macOS/ContentView.swift
new file mode 100644
index 000000000..70479972f
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/macOS/ContentView.swift
@@ -0,0 +1,54 @@
+import SwiftUI
+
+struct ContentView: View {
+ enum Tab: String, CaseIterable, Identifiable {
+ case playback
+ case publish
+ case preference
+
+ var id: String { rawValue }
+ }
+
+ @State private var selection: Tab = .playback
+
+ var body: some View {
+ NavigationSplitView {
+ List(Tab.allCases, selection: $selection) { tab in
+ Label(tabTitle(tab), systemImage: tabIcon(tab)).onTapGesture {
+ selection = tab
+ }
+ }
+ } detail: {
+ switch selection {
+ case .playback:
+ PlaybackView()
+ case .publish:
+ PublishView()
+ case .preference:
+ PreferenceView()
+ }
+ }
+ }
+
+ private func tabTitle(_ tab: Tab) -> String {
+ switch tab {
+ case .playback:
+ return "Playback"
+ case .publish:
+ return "Publish"
+ case .preference:
+ return "Preference"
+ }
+ }
+
+ private func tabIcon(_ tab: Tab) -> String {
+ switch tab {
+ case .playback:
+ return "play.circle"
+ case .publish:
+ return "record.circle"
+ case .preference:
+ return "info.circle"
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/macOS/HaishinApp.entitlements b/Vendor/HaishinKit.swift/Examples/macOS/HaishinApp.entitlements
new file mode 100644
index 000000000..6133db3ff
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/macOS/HaishinApp.entitlements
@@ -0,0 +1,18 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+ <plist version="1.0">
+ <dict>
+ <key>com.apple.security.app-sandbox</key>
+ <true/>
+ <key>com.apple.security.device.audio-input</key>
+ <true/>
+ <key>com.apple.security.device.camera</key>
+ <true/>
+ <key>com.apple.security.files.user-selected.read-only</key>
+ <true/>
+ <key>com.apple.security.network.client</key>
+ <true/>
+ <key>com.apple.security.network.server</key>
+ <true/>
+ </dict>
+ </plist>
diff --git a/Vendor/HaishinKit.swift/Examples/macOS/HaishinApp.swift b/Vendor/HaishinKit.swift/Examples/macOS/HaishinApp.swift
new file mode 100644
index 000000000..7c833042e
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/macOS/HaishinApp.swift
@@ -0,0 +1,34 @@
+import HaishinKit
+@preconcurrency import Logboard
+import RTCHaishinKit
+import RTMPHaishinKit
+import SRTHaishinKit
+import SwiftUI
+
+let logger = LBLogger.with("com.haishinkit.HaishinKit.HaishinApp")
+
+@main
+struct HaishinApp: App {
+ @State private var preference = PreferenceViewModel()
+
+ var body: some Scene {
+ WindowGroup {
+ ContentView().environmentObject(preference)
+ }
+ }
+
+ init() {
+ Task {
+ await SessionBuilderFactory.shared.register(RTMPSessionFactory())
+ await SessionBuilderFactory.shared.register(SRTSessionFactory())
+ await SessionBuilderFactory.shared.register(HTTPSessionFactory())
+
+ await RTCLogger.shared.setLevel(.debug)
+ await SRTLogger.shared.setLevel(.debug)
+ }
+ LBLogger(kHaishinKitIdentifier).level = .debug
+ LBLogger(kSRTHaishinKitIdentifier).level = .debug
+ LBLogger(kRTCHaishinKitIdentifier).level = .debug
+ LBLogger(kRTMPHaishinKitIdentifier).level = .debug
+ }
+}
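
The registrations above are what let SessionBuilderFactory.shared.make(_:) resolve a concrete session from a URL scheme later on. A hedged sketch of the consuming side, mirroring PlaybackViewModel below (the URL is a placeholder):

    // Sketch: the URL scheme (rtmp://, srt://, http://) picks the factory.
    let session = try await SessionBuilderFactory.shared
        .make(URL(string: "rtmp://example.com/live/streamkey"))
        .setMode(.playback)
        .build()
    try await session.connect {
        // Called when the connection is lost.
    }
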
diff --git a/Vendor/HaishinKit.swift/Examples/macOS/PlaybackView.swift b/Vendor/HaishinKit.swift/Examples/macOS/PlaybackView.swift
new file mode 100644
index 000000000..dd201f9f2
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/macOS/PlaybackView.swift
@@ -0,0 +1,57 @@
+import HaishinKit
+import SwiftUI
+
+struct PlaybackView: View {
+ @StateObject private var model = PlaybackViewModel()
+ @EnvironmentObject var preference: PreferenceViewModel
+
+ var body: some View {
+ ZStack {
+ VStack {
+ PiPHKViewRepresentable(previewSource: model)
+ }
+ if model.readyState == .connecting {
+ VStack {
+ ProgressView()
+ }
+ }
+ }.alert(isPresented: $model.isShowError) {
+ Alert(
+ title: Text("Error"),
+ message: Text(model.error?.localizedDescription ?? ""),
+ dismissButton: .default(Text("OK"))
+ )
+ }
+ .navigationTitle("Playback")
+ .toolbar {
+ switch model.readyState {
+ case .connecting:
+ ToolbarItem(placement: .primaryAction) {
+ }
+ case .open:
+ ToolbarItem(placement: .primaryAction) {
+ Button(action: {
+ model.stop()
+ }) {
+ Image(systemName: "stop.circle")
+ }
+ }
+ case .closed:
+ ToolbarItem(placement: .primaryAction) {
+ Button(action: {
+ model.start(preference)
+ }) {
+ Image(systemName: "play.circle")
+ }
+ }
+ case .closing:
+ ToolbarItem(placement: .primaryAction) {
+ }
+ }
+ }
+ }
+}
+
+#Preview {
+ PlaybackView()
+}
diff --git a/Vendor/HaishinKit.swift/Examples/macOS/PlaybackViewModel.swift b/Vendor/HaishinKit.swift/Examples/macOS/PlaybackViewModel.swift
new file mode 100644
index 000000000..6dfdc8181
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/macOS/PlaybackViewModel.swift
@@ -0,0 +1,74 @@
+@preconcurrency import AVKit
+import HaishinKit
+import SwiftUI
+
+@MainActor
+final class PlaybackViewModel: ObservableObject {
+ @Published private(set) var readyState: SessionReadyState = .closed
+ @Published private(set) var error: Error?
+ @Published var isShowError = false
+
+ private var view: PiPHKView?
+ private var session: (any Session)?
+ private let audioPlayer = AudioPlayer(audioEngine: AVAudioEngine())
+ private var pictureInPictureController: AVPictureInPictureController?
+
+ func start(_ preference: PreferenceViewModel) {
+ Task {
+ if session == nil {
+ await makeSession(preference)
+ }
+ do {
+ try await session?.connect {
+ Task { @MainActor in
+ self.isShowError = true
+ }
+ }
+ } catch {
+ self.error = error
+ self.isShowError = true
+ }
+ }
+ }
+
+ func stop() {
+ Task {
+ do {
+ try await session?.close()
+ } catch {
+ logger.error(error)
+ }
+ }
+ }
+
+ private func makeSession(_ preference: PreferenceViewModel) async {
+ do {
+ session = try await SessionBuilderFactory.shared.make(preference.makeURL())
+ .setMode(.playback)
+ .build()
+ guard let session else {
+ return
+ }
+ if let view {
+ await session.stream.addOutput(view)
+ }
+ await session.stream.attachAudioPlayer(audioPlayer)
+ Task {
+ for await readyState in await session.readyState {
+ self.readyState = readyState
+ }
+ }
+ } catch {
+ logger.error(error)
+ }
+ }
+}
+
+extension PlaybackViewModel: PiPHKViewRepresentable.PreviewSource {
+ // MARK: PiPHKSwiftUiView.PreviewSource
+ nonisolated func connect(to view: PiPHKView) {
+ Task { @MainActor in
+ self.view = view
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/macOS/PublishView.swift b/Vendor/HaishinKit.swift/Examples/macOS/PublishView.swift
new file mode 100644
index 000000000..502633508
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/macOS/PublishView.swift
@@ -0,0 +1,110 @@
+import AVFoundation
+import HaishinKit
+import SwiftUI
+
+enum FPS: String, CaseIterable, Identifiable {
+ case fps15 = "15"
+ case fps30 = "30"
+ case fps60 = "60"
+
+ var frameRate: Float64 {
+ switch self {
+ case .fps15:
+ return 15
+ case .fps30:
+ return 30
+ case .fps60:
+ return 60
+ }
+ }
+
+ var id: Self { self }
+}
+
+enum VideoEffectItem: String, CaseIterable, Identifiable, Sendable {
+ case none
+ case monochrome
+
+ var id: Self { self }
+
+ func makeVideoEffect() -> VideoEffect? {
+ switch self {
+ case .none:
+ return nil
+ case .monochrome:
+ return MonochromeEffect()
+ }
+ }
+}
+
+struct PublishView: View {
+ @Environment(\.horizontalSizeClass) private var horizontalSizeClass
+ @EnvironmentObject var preference: PreferenceViewModel
+ @StateObject private var model = PublishViewModel()
+
+ var body: some View {
+ ZStack {
+ VStack {
+ MTHKViewRepresentable(previewSource: model)
+ }
+ VStack(alignment: .trailing) {
+ Picker("FPS", selection: $model.currentFPS) {
+ ForEach(FPS.allCases) {
+ Text($0.rawValue).tag($0)
+ }
+ }
+ .onChange(of: model.currentFPS) { tag in
+ model.setFrameRate(tag.frameRate)
+ }
+ .pickerStyle(.segmented)
+ .frame(width: 150)
+ .padding()
+ Spacer()
+ }
+ }
+ .onAppear {
+ model.startRunning(preference)
+ }
+ .onDisappear {
+ model.stopRunning()
+ }
+ .navigationTitle("Publish")
+ .toolbar {
+ switch model.readyState {
+ case .connecting:
+ ToolbarItem(placement: .primaryAction) {
+ }
+ case .open:
+ ToolbarItem(placement: .primaryAction) {
+ Button(action: {
+ model.stopPublishing()
+ }) {
+ Image(systemName: "stop.circle")
+ }
+ }
+ case .closed:
+ ToolbarItem(placement: .primaryAction) {
+ Button(action: {
+ model.startPublishing(preference)
+ }) {
+ Image(systemName: "record.circle")
+ }
+ }
+ case .closing:
+ ToolbarItem(placement: .primaryAction) {
+ }
+ }
+ }
+ .alert(isPresented: $model.isShowError) {
+ Alert(
+ title: Text("Error"),
+ message: Text(model.error?.localizedDescription ?? ""),
+ dismissButton: .default(Text("OK"))
+ )
+ }
+ }
+}
+
+#Preview {
+ PublishView()
+}
diff --git a/Vendor/HaishinKit.swift/Examples/macOS/PublishViewModel.swift b/Vendor/HaishinKit.swift/Examples/macOS/PublishViewModel.swift
new file mode 100644
index 000000000..18575d681
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/macOS/PublishViewModel.swift
@@ -0,0 +1,174 @@
+import AVFoundation
+import HaishinKit
+import RTCHaishinKit
+import SwiftUI
+
+@MainActor
+final class PublishViewModel: ObservableObject {
+ @Published var currentFPS: FPS = .fps30
+ @Published var visualEffectItem: VideoEffectItem = .none
+ @Published private(set) var error: Error?
+ @Published var isShowError = false
+ @Published private(set) var isTorchEnabled = false
+ @Published private(set) var readyState: SessionReadyState = .closed
+ private(set) var mixer = MediaMixer(captureSessionMode: .multi)
+ private var tasks: [Task<Void, Never>] = []
+ private var session: (any Session)?
+ private var currentPosition: AVCaptureDevice.Position = .back
+ @ScreenActor private var currentVideoEffect: VideoEffect?
+
+ func startPublishing(_ preference: PreferenceViewModel) {
+ Task {
+ guard let session else {
+ return
+ }
+ do {
+ try await session.connect {
+ Task { @MainActor in
+ self.isShowError = true
+ }
+ }
+ } catch {
+ self.error = error
+ self.isShowError = true
+ logger.error(error)
+ }
+ }
+ }
+
+ func stopPublishing() {
+ Task {
+ do {
+ try await session?.close()
+ } catch {
+ logger.error(error)
+ }
+ }
+ }
+
+ func makeSession(_ preference: PreferenceViewModel) async {
+ // Make session.
+ do {
+ session = try await SessionBuilderFactory.shared.make(preference.makeURL())
+ .setMode(.publish)
+ .build()
+ guard let session else {
+ return
+ }
+ await mixer.addOutput(session.stream)
+ tasks.append(Task {
+ for await readyState in await session.readyState {
+ self.readyState = readyState
+ }
+ })
+ } catch {
+ self.error = error
+ isShowError = true
+ }
+ do {
+ if let session {
+ try await session.stream.setAudioSettings(preference.makeAudioCodecSettings(session.stream.audioSettings))
+ }
+ } catch {
+ self.error = error
+ isShowError = true
+ }
+ do {
+ if let session {
+ try await session.stream.setVideoSettings(preference.makeVideoCodecSettings(session.stream.videoSettings))
+ }
+ } catch {
+ self.error = error
+ isShowError = true
+ }
+ }
+
+ func startRunning(_ preference: PreferenceViewModel) {
+ Task {
+ // Set up the mixer.
+ var videoMixerSettings = await mixer.videoMixerSettings
+ videoMixerSettings.mode = .offscreen
+ await mixer.setVideoMixerSettings(videoMixerSettings)
+ // Attach the default capture devices.
+ let back = AVCaptureDevice.default(for: .video)
+ try? await mixer.attachVideo(back, track: 0)
+ let audio = AVCaptureDevice.default(for: .audio)
+ try? await mixer.attachAudio(audio, track: 0)
+ await mixer.startRunning()
+ await makeSession(preference)
+ }
+ Task { @ScreenActor in
+ if await preference.isGPURendererEnabled {
+ await mixer.screen.isGPURendererEnabled = true
+ } else {
+ await mixer.screen.isGPURendererEnabled = false
+ }
+ let assetScreenObject = AssetScreenObject()
+ assetScreenObject.size = .init(width: 180, height: 180)
+ assetScreenObject.layoutMargin = .init(top: 16, left: 16, bottom: 0, right: 0)
+ try? assetScreenObject.startReading(AVAsset(url: URL(fileURLWithPath: Bundle.main.path(forResource: "SampleVideo_360x240_5mb", ofType: "mp4") ?? "")))
+ try? await mixer.screen.addChild(assetScreenObject)
+ await mixer.screen.size = .init(width: 1280, height: 720)
+ await mixer.screen.backgroundColor = NSColor.black.cgColor
+ }
+ }
+
+ func stopRunning() {
+ Task {
+ await mixer.stopRunning()
+ try? await mixer.attachAudio(nil)
+ try? await mixer.attachVideo(nil)
+ if let session {
+ await mixer.removeOutput(session.stream)
+ }
+ tasks.forEach { $0.cancel() }
+ tasks.removeAll()
+ }
+ }
+
+ func setVisualEffet(_ videoEffect: VideoEffectItem) {
+ Task { @ScreenActor in
+ if let currentVideoEffect {
+ _ = await mixer.screen.unregisterVideoEffect(currentVideoEffect)
+ }
+ if let videoEffect = videoEffect.makeVideoEffect() {
+ currentVideoEffect = videoEffect
+ _ = await mixer.screen.registerVideoEffect(videoEffect)
+ }
+ }
+ }
+
+ func setFrameRate(_ fps: Float64) {
+ Task {
+ do {
+ // Set the input frame rate.
+ try? await mixer.configuration(video: 0) { video in
+ do {
+ try video.setFrameRate(fps)
+ } catch {
+ logger.error(error)
+ }
+ }
+ try? await mixer.configuration(video: 1) { video in
+ do {
+ try video.setFrameRate(fps)
+ } catch {
+ logger.error(error)
+ }
+ }
+ // Set the output frame rate.
+ try await mixer.setFrameRate(fps)
+ } catch {
+ logger.error(error)
+ }
+ }
+ }
+}
+
+extension PublishViewModel: MTHKViewRepresentable.PreviewSource {
+ nonisolated func connect(to view: HaishinKit.MTHKView) {
+ Task {
+ await mixer.addOutput(view)
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/macOS/SampleVideo_360x240_5mb.mp4 b/Vendor/HaishinKit.swift/Examples/macOS/SampleVideo_360x240_5mb.mp4
new file mode 100644
index 000000000..f27c0927f
Binary files /dev/null and b/Vendor/HaishinKit.swift/Examples/macOS/SampleVideo_360x240_5mb.mp4 differ
diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/AccentColor.colorset/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/AccentColor.colorset/Contents.json
new file mode 100644
index 000000000..eb8789700
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/AccentColor.colorset/Contents.json
@@ -0,0 +1,11 @@
+{
+ "colors" : [
+ {
+ "idiom" : "universal"
+ }
+ ],
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Back.imagestacklayer/Content.imageset/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Back.imagestacklayer/Content.imageset/Contents.json
new file mode 100644
index 000000000..2e003356c
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Back.imagestacklayer/Content.imageset/Contents.json
@@ -0,0 +1,11 @@
+{
+ "images" : [
+ {
+ "idiom" : "tv"
+ }
+ ],
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Back.imagestacklayer/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Back.imagestacklayer/Contents.json
new file mode 100644
index 000000000..73c00596a
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Back.imagestacklayer/Contents.json
@@ -0,0 +1,6 @@
+{
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Contents.json
new file mode 100644
index 000000000..de59d885a
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Contents.json
@@ -0,0 +1,17 @@
+{
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ },
+ "layers" : [
+ {
+ "filename" : "Front.imagestacklayer"
+ },
+ {
+ "filename" : "Middle.imagestacklayer"
+ },
+ {
+ "filename" : "Back.imagestacklayer"
+ }
+ ]
+}
diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Front.imagestacklayer/Content.imageset/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Front.imagestacklayer/Content.imageset/Contents.json
new file mode 100644
index 000000000..2e003356c
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Front.imagestacklayer/Content.imageset/Contents.json
@@ -0,0 +1,11 @@
+{
+ "images" : [
+ {
+ "idiom" : "tv"
+ }
+ ],
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Front.imagestacklayer/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Front.imagestacklayer/Contents.json
new file mode 100644
index 000000000..73c00596a
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Front.imagestacklayer/Contents.json
@@ -0,0 +1,6 @@
+{
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Middle.imagestacklayer/Content.imageset/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Middle.imagestacklayer/Content.imageset/Contents.json
new file mode 100644
index 000000000..2e003356c
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Middle.imagestacklayer/Content.imageset/Contents.json
@@ -0,0 +1,11 @@
+{
+ "images" : [
+ {
+ "idiom" : "tv"
+ }
+ ],
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Middle.imagestacklayer/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Middle.imagestacklayer/Contents.json
new file mode 100644
index 000000000..73c00596a
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Middle.imagestacklayer/Contents.json
@@ -0,0 +1,6 @@
+{
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Back.imagestacklayer/Content.imageset/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Back.imagestacklayer/Content.imageset/Contents.json
new file mode 100644
index 000000000..795cce172
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Back.imagestacklayer/Content.imageset/Contents.json
@@ -0,0 +1,16 @@
+{
+ "images" : [
+ {
+ "idiom" : "tv",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "tv",
+ "scale" : "2x"
+ }
+ ],
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Back.imagestacklayer/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Back.imagestacklayer/Contents.json
new file mode 100644
index 000000000..73c00596a
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Back.imagestacklayer/Contents.json
@@ -0,0 +1,6 @@
+{
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Contents.json
new file mode 100644
index 000000000..de59d885a
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Contents.json
@@ -0,0 +1,17 @@
+{
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ },
+ "layers" : [
+ {
+ "filename" : "Front.imagestacklayer"
+ },
+ {
+ "filename" : "Middle.imagestacklayer"
+ },
+ {
+ "filename" : "Back.imagestacklayer"
+ }
+ ]
+}
diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Front.imagestacklayer/Content.imageset/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Front.imagestacklayer/Content.imageset/Contents.json
new file mode 100644
index 000000000..795cce172
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Front.imagestacklayer/Content.imageset/Contents.json
@@ -0,0 +1,16 @@
+{
+ "images" : [
+ {
+ "idiom" : "tv",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "tv",
+ "scale" : "2x"
+ }
+ ],
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Front.imagestacklayer/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Front.imagestacklayer/Contents.json
new file mode 100644
index 000000000..73c00596a
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Front.imagestacklayer/Contents.json
@@ -0,0 +1,6 @@
+{
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Middle.imagestacklayer/Content.imageset/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Middle.imagestacklayer/Content.imageset/Contents.json
new file mode 100644
index 000000000..795cce172
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Middle.imagestacklayer/Content.imageset/Contents.json
@@ -0,0 +1,16 @@
+{
+ "images" : [
+ {
+ "idiom" : "tv",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "tv",
+ "scale" : "2x"
+ }
+ ],
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Middle.imagestacklayer/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Middle.imagestacklayer/Contents.json
new file mode 100644
index 000000000..73c00596a
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Middle.imagestacklayer/Contents.json
@@ -0,0 +1,6 @@
+{
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/Contents.json
new file mode 100644
index 000000000..f47ba43da
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/Contents.json
@@ -0,0 +1,32 @@
+{
+ "assets" : [
+ {
+ "filename" : "App Icon - App Store.imagestack",
+ "idiom" : "tv",
+ "role" : "primary-app-icon",
+ "size" : "1280x768"
+ },
+ {
+ "filename" : "App Icon.imagestack",
+ "idiom" : "tv",
+ "role" : "primary-app-icon",
+ "size" : "400x240"
+ },
+ {
+ "filename" : "Top Shelf Image Wide.imageset",
+ "idiom" : "tv",
+ "role" : "top-shelf-image-wide",
+ "size" : "2320x720"
+ },
+ {
+ "filename" : "Top Shelf Image.imageset",
+ "idiom" : "tv",
+ "role" : "top-shelf-image",
+ "size" : "1920x720"
+ }
+ ],
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/Top Shelf Image Wide.imageset/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/Top Shelf Image Wide.imageset/Contents.json
new file mode 100644
index 000000000..795cce172
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/Top Shelf Image Wide.imageset/Contents.json
@@ -0,0 +1,16 @@
+{
+ "images" : [
+ {
+ "idiom" : "tv",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "tv",
+ "scale" : "2x"
+ }
+ ],
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/Top Shelf Image.imageset/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/Top Shelf Image.imageset/Contents.json
new file mode 100644
index 000000000..795cce172
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/Top Shelf Image.imageset/Contents.json
@@ -0,0 +1,16 @@
+{
+ "images" : [
+ {
+ "idiom" : "tv",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "tv",
+ "scale" : "2x"
+ }
+ ],
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/Contents.json
new file mode 100644
index 000000000..73c00596a
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/Contents.json
@@ -0,0 +1,6 @@
+{
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/ContentView.swift b/Vendor/HaishinKit.swift/Examples/tvOS/ContentView.swift
new file mode 100644
index 000000000..cafb27af0
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/tvOS/ContentView.swift
@@ -0,0 +1,23 @@
+import SwiftUI
+
+struct ContentView: View {
+ var body: some View {
+ TabView {
+ PlaybackView()
+ .tabItem {
+ Image(systemName: "play.circle")
+ Text("Playback")
+ }
+
+ PreferenceView()
+ .tabItem {
+ Image(systemName: "person.circle")
+ Text("Preference")
+ }
+ }
+ }
+}
+
+#Preview {
+ ContentView()
+}
diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/HaishinApp.swift b/Vendor/HaishinKit.swift/Examples/tvOS/HaishinApp.swift
new file mode 100644
index 000000000..e30d3a383
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/tvOS/HaishinApp.swift
@@ -0,0 +1,34 @@
+import HaishinKit
+@preconcurrency import Logboard
+import RTCHaishinKit
+import RTMPHaishinKit
+import SRTHaishinKit
+import SwiftUI
+
+let logger = LBLogger.with("com.haishinkit.HaishinKit.HaishinApp")
+
+@main
+struct HaishinApp: App {
+ @State private var preference = PreferenceViewModel()
+
+ var body: some Scene {
+ WindowGroup {
+ ContentView().environmentObject(preference)
+ }
+ }
+
+ init() {
+ Task {
+ await SessionBuilderFactory.shared.register(RTMPSessionFactory())
+ await SessionBuilderFactory.shared.register(SRTSessionFactory())
+ await SessionBuilderFactory.shared.register(HTTPSessionFactory())
+
+ await RTCLogger.shared.setLevel(.debug)
+ await SRTLogger.shared.setLevel(.debug)
+ }
+ LBLogger(kHaishinKitIdentifier).level = .debug
+ LBLogger(kRTCHaishinKitIdentifier).level = .debug
+ LBLogger(kRTMPHaishinKitIdentifier).level = .debug
+ LBLogger(kSRTHaishinKitIdentifier).level = .debug
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/PublishView.swift b/Vendor/HaishinKit.swift/Examples/tvOS/PublishView.swift
new file mode 100644
index 000000000..bf9424793
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/tvOS/PublishView.swift
@@ -0,0 +1,8 @@
+import SwiftUI
+
+struct PublishView: View {
+ var body: some View {
+ ZStack {
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/visionOS/ContentView.swift b/Vendor/HaishinKit.swift/Examples/visionOS/ContentView.swift
new file mode 100644
index 000000000..d341de8e7
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/visionOS/ContentView.swift
@@ -0,0 +1,16 @@
+import HaishinKit
+import RTMPHaishinKit
+import SRTHaishinKit
+import SwiftUI
+
+struct ContentView: View {
+ var body: some View {
+ VStack {
+ PlaybackView()
+ }
+ }
+}
+
+#Preview(windowStyle: .automatic) {
+ ContentView()
+}
diff --git a/Vendor/HaishinKit.swift/Examples/visionOS/HaishinApp.swift b/Vendor/HaishinKit.swift/Examples/visionOS/HaishinApp.swift
new file mode 100644
index 000000000..f1e081c35
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/visionOS/HaishinApp.swift
@@ -0,0 +1,23 @@
+import HaishinKit
+@preconcurrency import Logboard
+import RTMPHaishinKit
+import SRTHaishinKit
+import SwiftUI
+
+let logger = LBLogger.with("com.haishinkit.HaishinKit.visionOSApp")
+
+@main
+struct HaishinApp: App {
+ var body: some Scene {
+ WindowGroup {
+ ContentView()
+ }
+ }
+
+ init() {
+ Task {
+ await SessionBuilderFactory.shared.register(RTMPSessionFactory())
+ await SessionBuilderFactory.shared.register(SRTSessionFactory())
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Examples/visionOS/Info.plist b/Vendor/HaishinKit.swift/Examples/visionOS/Info.plist
new file mode 100644
index 000000000..20f75e2af
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Examples/visionOS/Info.plist
@@ -0,0 +1,15 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+ <plist version="1.0">
+ <dict>
+ <key>UIApplicationSceneManifest</key>
+ <dict>
+ <key>UIApplicationPreferredDefaultSceneSessionRole</key>
+ <string>UIWindowSceneSessionRoleApplication</string>
+ <key>UIApplicationSupportsMultipleScenes</key>
+ <true/>
+ <key>UISceneConfigurations</key>
+ <dict/>
+ </dict>
+ </dict>
+ </plist>
diff --git a/Vendor/HaishinKit.swift/Gemfile b/Vendor/HaishinKit.swift/Gemfile
new file mode 100644
index 000000000..9135f1785
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Gemfile
@@ -0,0 +1,8 @@
+source 'https://rubygems.org'
+
+gem 'cocoapods'
+gem 'fastlane'
+gem 'rubocop'
+gem 'danger'
+gem 'abbrev'
+gem 'danger-privacymanifest', git: 'https://github.com/shogo4405/danger-privacymanifest'
diff --git a/Vendor/HaishinKit.swift/Gemfile.lock b/Vendor/HaishinKit.swift/Gemfile.lock
new file mode 100644
index 000000000..1e7673d94
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Gemfile.lock
@@ -0,0 +1,412 @@
+GIT
+ remote: https://github.com/shogo4405/danger-privacymanifest
+ revision: 64757a8dd2d121a7996738d1cbc58fdfc0b297a8
+ specs:
+ danger-privacymanifest (0.0.1)
+ danger-plugin-api (~> 1.0)
+
+GEM
+ remote: https://rubygems.org/
+ specs:
+ CFPropertyList (3.0.8)
+ abbrev (0.1.2)
+ activesupport (7.2.3)
+ base64
+ benchmark (>= 0.3)
+ bigdecimal
+ concurrent-ruby (~> 1.0, >= 1.3.1)
+ connection_pool (>= 2.2.5)
+ drb
+ i18n (>= 1.6, < 2)
+ logger (>= 1.4.2)
+ minitest (>= 5.1)
+ securerandom (>= 0.3)
+ tzinfo (~> 2.0, >= 2.0.5)
+ addressable (2.8.8)
+ public_suffix (>= 2.0.2, < 8.0)
+ algoliasearch (1.27.5)
+ httpclient (~> 2.8, >= 2.8.3)
+ json (>= 1.5.1)
+ artifactory (3.0.17)
+ ast (2.4.3)
+ atomos (0.1.3)
+ aws-eventstream (1.4.0)
+ aws-partitions (1.1209.0)
+ aws-sdk-core (3.241.4)
+ aws-eventstream (~> 1, >= 1.3.0)
+ aws-partitions (~> 1, >= 1.992.0)
+ aws-sigv4 (~> 1.9)
+ base64
+ bigdecimal
+ jmespath (~> 1, >= 1.6.1)
+ logger
+ aws-sdk-kms (1.121.0)
+ aws-sdk-core (~> 3, >= 3.241.4)
+ aws-sigv4 (~> 1.5)
+ aws-sdk-s3 (1.212.0)
+ aws-sdk-core (~> 3, >= 3.241.4)
+ aws-sdk-kms (~> 1)
+ aws-sigv4 (~> 1.5)
+ aws-sigv4 (1.12.1)
+ aws-eventstream (~> 1, >= 1.0.2)
+ babosa (1.0.4)
+ base64 (0.2.0)
+ benchmark (0.5.0)
+ bigdecimal (4.0.1)
+ claide (1.1.0)
+ claide-plugins (0.9.2)
+ cork
+ nap
+ open4 (~> 1.3)
+ cocoapods (1.16.2)
+ addressable (~> 2.8)
+ claide (>= 1.0.2, < 2.0)
+ cocoapods-core (= 1.16.2)
+ cocoapods-deintegrate (>= 1.0.3, < 2.0)
+ cocoapods-downloader (>= 2.1, < 3.0)
+ cocoapods-plugins (>= 1.0.0, < 2.0)
+ cocoapods-search (>= 1.0.0, < 2.0)
+ cocoapods-trunk (>= 1.6.0, < 2.0)
+ cocoapods-try (>= 1.1.0, < 2.0)
+ colored2 (~> 3.1)
+ escape (~> 0.0.4)
+ fourflusher (>= 2.3.0, < 3.0)
+ gh_inspector (~> 1.0)
+ molinillo (~> 0.8.0)
+ nap (~> 1.0)
+ ruby-macho (>= 2.3.0, < 3.0)
+ xcodeproj (>= 1.27.0, < 2.0)
+ cocoapods-core (1.16.2)
+ activesupport (>= 5.0, < 8)
+ addressable (~> 2.8)
+ algoliasearch (~> 1.0)
+ concurrent-ruby (~> 1.1)
+ fuzzy_match (~> 2.0.4)
+ nap (~> 1.0)
+ netrc (~> 0.11)
+ public_suffix (~> 4.0)
+ typhoeus (~> 1.0)
+ cocoapods-deintegrate (1.0.5)
+ cocoapods-downloader (2.1)
+ cocoapods-plugins (1.0.0)
+ nap
+ cocoapods-search (1.0.1)
+ cocoapods-trunk (1.6.0)
+ nap (>= 0.8, < 2.0)
+ netrc (~> 0.11)
+ cocoapods-try (1.2.0)
+ colored (1.2)
+ colored2 (3.1.2)
+ commander (4.6.0)
+ highline (~> 2.0.0)
+ concurrent-ruby (1.3.6)
+ connection_pool (3.0.2)
+ cork (0.3.0)
+ colored2 (~> 3.1)
+ csv (3.3.5)
+ danger (9.5.3)
+ base64 (~> 0.2)
+ claide (~> 1.0)
+ claide-plugins (>= 0.9.2)
+ colored2 (>= 3.1, < 5)
+ cork (~> 0.1)
+ faraday (>= 0.9.0, < 3.0)
+ faraday-http-cache (~> 2.0)
+ git (>= 1.13, < 3.0)
+ kramdown (>= 2.5.1, < 3.0)
+ kramdown-parser-gfm (~> 1.0)
+ octokit (>= 4.0)
+ pstore (~> 0.1)
+ terminal-table (>= 1, < 5)
+ danger-plugin-api (1.0.0)
+ danger (> 2.0)
+ declarative (0.0.20)
+ digest-crc (0.7.0)
+ rake (>= 12.0.0, < 14.0.0)
+ domain_name (0.6.20240107)
+ dotenv (2.8.1)
+ drb (2.2.3)
+ emoji_regex (3.2.3)
+ escape (0.0.4)
+ ethon (0.15.0)
+ ffi (>= 1.15.0)
+ excon (0.112.0)
+ faraday (1.10.4)
+ faraday-em_http (~> 1.0)
+ faraday-em_synchrony (~> 1.0)
+ faraday-excon (~> 1.1)
+ faraday-httpclient (~> 1.0)
+ faraday-multipart (~> 1.0)
+ faraday-net_http (~> 1.0)
+ faraday-net_http_persistent (~> 1.0)
+ faraday-patron (~> 1.0)
+ faraday-rack (~> 1.0)
+ faraday-retry (~> 1.0)
+ ruby2_keywords (>= 0.0.4)
+ faraday-cookie_jar (0.0.8)
+ faraday (>= 0.8.0)
+ http-cookie (>= 1.0.0)
+ faraday-em_http (1.0.0)
+ faraday-em_synchrony (1.0.1)
+ faraday-excon (1.1.0)
+ faraday-http-cache (2.5.1)
+ faraday (>= 0.8)
+ faraday-httpclient (1.0.1)
+ faraday-multipart (1.2.0)
+ multipart-post (~> 2.0)
+ faraday-net_http (1.0.2)
+ faraday-net_http_persistent (1.2.0)
+ faraday-patron (1.0.0)
+ faraday-rack (1.0.0)
+ faraday-retry (1.0.3)
+ faraday_middleware (1.2.1)
+ faraday (~> 1.0)
+ fastimage (2.4.0)
+ fastlane (2.231.1)
+ CFPropertyList (>= 2.3, < 4.0.0)
+ abbrev (~> 0.1.2)
+ addressable (>= 2.8, < 3.0.0)
+ artifactory (~> 3.0)
+ aws-sdk-s3 (~> 1.0)
+ babosa (>= 1.0.3, < 2.0.0)
+ base64 (~> 0.2.0)
+ benchmark (>= 0.1.0)
+ bundler (>= 1.17.3, < 5.0.0)
+ colored (~> 1.2)
+ commander (~> 4.6)
+ csv (~> 3.3)
+ dotenv (>= 2.1.1, < 3.0.0)
+ emoji_regex (>= 0.1, < 4.0)
+ excon (>= 0.71.0, < 1.0.0)
+ faraday (~> 1.0)
+ faraday-cookie_jar (~> 0.0.6)
+ faraday_middleware (~> 1.0)
+ fastimage (>= 2.1.0, < 3.0.0)
+ fastlane-sirp (>= 1.0.0)
+ gh_inspector (>= 1.1.2, < 2.0.0)
+ google-apis-androidpublisher_v3 (~> 0.3)
+ google-apis-playcustomapp_v1 (~> 0.1)
+ google-cloud-env (>= 1.6.0, < 2.0.0)
+ google-cloud-storage (~> 1.31)
+ highline (~> 2.0)
+ http-cookie (~> 1.0.5)
+ json (< 3.0.0)
+ jwt (>= 2.1.0, < 3)
+ logger (>= 1.6, < 2.0)
+ mini_magick (>= 4.9.4, < 5.0.0)
+ multipart-post (>= 2.0.0, < 3.0.0)
+ mutex_m (~> 0.3.0)
+ naturally (~> 2.2)
+ nkf (~> 0.2.0)
+ optparse (>= 0.1.1, < 1.0.0)
+ ostruct (>= 0.1.0)
+ plist (>= 3.1.0, < 4.0.0)
+ rubyzip (>= 2.0.0, < 3.0.0)
+ security (= 0.1.5)
+ simctl (~> 1.6.3)
+ terminal-notifier (>= 2.0.0, < 3.0.0)
+ terminal-table (~> 3)
+ tty-screen (>= 0.6.3, < 1.0.0)
+ tty-spinner (>= 0.8.0, < 1.0.0)
+ word_wrap (~> 1.0.0)
+ xcodeproj (>= 1.13.0, < 2.0.0)
+ xcpretty (~> 0.4.1)
+ xcpretty-travis-formatter (>= 0.0.3, < 2.0.0)
+ fastlane-sirp (1.0.0)
+ sysrandom (~> 1.0)
+ ffi (1.17.3)
+ ffi (1.17.3-aarch64-linux-gnu)
+ ffi (1.17.3-aarch64-linux-musl)
+ ffi (1.17.3-arm-linux-gnu)
+ ffi (1.17.3-arm-linux-musl)
+ ffi (1.17.3-arm64-darwin)
+ ffi (1.17.3-x86-linux-gnu)
+ ffi (1.17.3-x86-linux-musl)
+ ffi (1.17.3-x86_64-darwin)
+ ffi (1.17.3-x86_64-linux-gnu)
+ ffi (1.17.3-x86_64-linux-musl)
+ fourflusher (2.3.1)
+ fuzzy_match (2.0.4)
+ gh_inspector (1.1.3)
+ git (2.3.3)
+ activesupport (>= 5.0)
+ addressable (~> 2.8)
+ process_executer (~> 1.1)
+ rchardet (~> 1.8)
+ google-apis-androidpublisher_v3 (0.54.0)
+ google-apis-core (>= 0.11.0, < 2.a)
+ google-apis-core (0.11.3)
+ addressable (~> 2.5, >= 2.5.1)
+ googleauth (>= 0.16.2, < 2.a)
+ httpclient (>= 2.8.1, < 3.a)
+ mini_mime (~> 1.0)
+ representable (~> 3.0)
+ retriable (>= 2.0, < 4.a)
+ rexml
+ google-apis-iamcredentials_v1 (0.17.0)
+ google-apis-core (>= 0.11.0, < 2.a)
+ google-apis-playcustomapp_v1 (0.13.0)
+ google-apis-core (>= 0.11.0, < 2.a)
+ google-apis-storage_v1 (0.31.0)
+ google-apis-core (>= 0.11.0, < 2.a)
+ google-cloud-core (1.8.0)
+ google-cloud-env (>= 1.0, < 3.a)
+ google-cloud-errors (~> 1.0)
+ google-cloud-env (1.6.0)
+ faraday (>= 0.17.3, < 3.0)
+ google-cloud-errors (1.5.0)
+ google-cloud-storage (1.47.0)
+ addressable (~> 2.8)
+ digest-crc (~> 0.4)
+ google-apis-iamcredentials_v1 (~> 0.1)
+ google-apis-storage_v1 (~> 0.31.0)
+ google-cloud-core (~> 1.6)
+ googleauth (>= 0.16.2, < 2.a)
+ mini_mime (~> 1.0)
+ googleauth (1.8.1)
+ faraday (>= 0.17.3, < 3.a)
+ jwt (>= 1.4, < 3.0)
+ multi_json (~> 1.11)
+ os (>= 0.9, < 2.0)
+ signet (>= 0.16, < 2.a)
+ highline (2.0.3)
+ http-cookie (1.0.8)
+ domain_name (~> 0.5)
+ httpclient (2.9.0)
+ mutex_m
+ i18n (1.14.8)
+ concurrent-ruby (~> 1.0)
+ jmespath (1.6.2)
+ json (2.18.0)
+ jwt (2.10.2)
+ base64
+ kramdown (2.5.1)
+ rexml (>= 3.3.9)
+ kramdown-parser-gfm (1.1.0)
+ kramdown (~> 2.0)
+ language_server-protocol (3.17.0.5)
+ lint_roller (1.1.0)
+ logger (1.7.0)
+ mini_magick (4.13.2)
+ mini_mime (1.1.5)
+ minitest (6.0.1)
+ prism (~> 1.5)
+ molinillo (0.8.0)
+ multi_json (1.19.1)
+ multipart-post (2.4.1)
+ mutex_m (0.3.0)
+ nanaimo (0.4.0)
+ nap (1.1.0)
+ naturally (2.3.0)
+ netrc (0.11.0)
+ nkf (0.2.0)
+ octokit (10.0.0)
+ faraday (>= 1, < 3)
+ sawyer (~> 0.9)
+ open4 (1.3.4)
+ optparse (0.8.1)
+ os (1.1.4)
+ ostruct (0.6.3)
+ parallel (1.27.0)
+ parser (3.3.10.1)
+ ast (~> 2.4.1)
+ racc
+ plist (3.7.2)
+ prism (1.9.0)
+ process_executer (1.3.0)
+ pstore (0.2.0)
+ public_suffix (4.0.7)
+ racc (1.8.1)
+ rainbow (3.1.1)
+ rake (13.3.1)
+ rchardet (1.10.0)
+ regexp_parser (2.11.3)
+ representable (3.2.0)
+ declarative (< 0.1.0)
+ trailblazer-option (>= 0.1.1, < 0.2.0)
+ uber (< 0.2.0)
+ retriable (3.1.2)
+ rexml (3.4.4)
+ rouge (3.28.0)
+ rubocop (1.84.0)
+ json (~> 2.3)
+ language_server-protocol (~> 3.17.0.2)
+ lint_roller (~> 1.1.0)
+ parallel (~> 1.10)
+ parser (>= 3.3.0.2)
+ rainbow (>= 2.2.2, < 4.0)
+ regexp_parser (>= 2.9.3, < 3.0)
+ rubocop-ast (>= 1.49.0, < 2.0)
+ ruby-progressbar (~> 1.7)
+ unicode-display_width (>= 2.4.0, < 4.0)
+ rubocop-ast (1.49.0)
+ parser (>= 3.3.7.2)
+ prism (~> 1.7)
+ ruby-macho (2.5.1)
+ ruby-progressbar (1.13.0)
+ ruby2_keywords (0.0.5)
+ rubyzip (2.4.1)
+ sawyer (0.9.3)
+ addressable (>= 2.3.5)
+ faraday (>= 0.17.3, < 3)
+ securerandom (0.4.1)
+ security (0.1.5)
+ signet (0.21.0)
+ addressable (~> 2.8)
+ faraday (>= 0.17.5, < 3.a)
+ jwt (>= 1.5, < 4.0)
+ multi_json (~> 1.10)
+ simctl (1.6.10)
+ CFPropertyList
+ naturally
+ sysrandom (1.0.5)
+ terminal-notifier (2.0.0)
+ terminal-table (3.0.2)
+ unicode-display_width (>= 1.1.1, < 3)
+ trailblazer-option (0.1.2)
+ tty-cursor (0.7.1)
+ tty-screen (0.8.2)
+ tty-spinner (0.9.3)
+ tty-cursor (~> 0.7)
+ typhoeus (1.5.0)
+ ethon (>= 0.9.0, < 0.16.0)
+ tzinfo (2.0.6)
+ concurrent-ruby (~> 1.0)
+ uber (0.1.0)
+ unicode-display_width (2.6.0)
+ word_wrap (1.0.0)
+ xcodeproj (1.27.0)
+ CFPropertyList (>= 2.3.3, < 4.0)
+ atomos (~> 0.1.3)
+ claide (>= 1.0.2, < 2.0)
+ colored2 (~> 3.1)
+ nanaimo (~> 0.4.0)
+ rexml (>= 3.3.6, < 4.0)
+ xcpretty (0.4.1)
+ rouge (~> 3.28.0)
+ xcpretty-travis-formatter (1.0.1)
+ xcpretty (~> 0.2, >= 0.0.7)
+
+PLATFORMS
+ aarch64-linux-gnu
+ aarch64-linux-musl
+ arm-linux-gnu
+ arm-linux-musl
+ arm64-darwin
+ ruby
+ x86-linux-gnu
+ x86-linux-musl
+ x86_64-darwin
+ x86_64-linux-gnu
+ x86_64-linux-musl
+
+DEPENDENCIES
+ abbrev
+ cocoapods
+ danger
+ danger-privacymanifest!
+ fastlane
+ rubocop
+
+BUNDLED WITH
+ 2.6.7
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/ADTSHeader.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/ADTSHeader.swift
new file mode 100644
index 000000000..14f5965fc
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/ADTSHeader.swift
@@ -0,0 +1,91 @@
+import CoreMedia
+import Foundation
+
+package struct ADTSHeader: Equatable {
+ static let size: Int = 7
+ static let sizeWithCrc = 9
+ static let sync: UInt8 = 0xFF
+
+ var sync = Self.sync
+ var id: UInt8 = 0
+ var layer: UInt8 = 0
+ var protectionAbsent = false
+ var profile: UInt8 = 0
+ var sampleFrequencyIndex: UInt8 = 0
+ var channelConfiguration: UInt8 = 0
+ var originalOrCopy = false
+ var home = false
+ var copyrightIdBit = false
+ var copyrightIdStart = false
+ package var aacFrameLength: UInt16 = 0
+ var bufferFullness: UInt16 = 0
+ var aacFrames: UInt8 = 0
+
+ package init() {
+ }
+
+ package init(data: Data) {
+ self.data = data
+ }
+
+ package func makeFormatDescription() -> CMFormatDescription? {
+ guard
+ let type = AudioSpecificConfig.AudioObjectType(rawValue: profile + 1),
+ let frequency = AudioSpecificConfig.SamplingFrequency(rawValue: sampleFrequencyIndex),
+ let channel = AudioSpecificConfig.ChannelConfiguration(rawValue: channelConfiguration) else {
+ return nil
+ }
+ var formatDescription: CMAudioFormatDescription?
+ var audioStreamBasicDescription = AudioStreamBasicDescription(
+ mSampleRate: frequency.sampleRate,
+ mFormatID: kAudioFormatMPEG4AAC,
+ mFormatFlags: UInt32(type.rawValue),
+ mBytesPerPacket: 0,
+ mFramesPerPacket: 1024,
+ mBytesPerFrame: 0,
+ mChannelsPerFrame: UInt32(channel.rawValue),
+ mBitsPerChannel: 0,
+ mReserved: 0
+ )
+ guard CMAudioFormatDescriptionCreate(
+ allocator: kCFAllocatorDefault,
+ asbd: &audioStreamBasicDescription,
+ layoutSize: 0,
+ layout: nil,
+ magicCookieSize: 0,
+ magicCookie: nil,
+ extensions: nil,
+ formatDescriptionOut: &formatDescription
+ ) == noErr else {
+ return nil
+ }
+ return formatDescription
+ }
+}
+
+extension ADTSHeader: DataConvertible {
+ package var data: Data {
+ get {
+ Data()
+ }
+ set {
+ guard ADTSHeader.size <= newValue.count else {
+ return
+ }
+ sync = newValue[0]
+ id = (newValue[1] & 0b00001111) >> 3
+ layer = (newValue[1] >> 2) & 0b00000011
+ protectionAbsent = (newValue[1] & 0b00000001) == 1
+ profile = newValue[2] >> 6 & 0b11
+ sampleFrequencyIndex = (newValue[2] >> 2) & 0b00001111
+ channelConfiguration = ((newValue[2] & 0b1) << 2) | newValue[3] >> 6
+ originalOrCopy = (newValue[3] & 0b00100000) == 0b00100000
+ home = (newValue[3] & 0b00010000) == 0b00010000
+ copyrightIdBit = (newValue[3] & 0b00001000) == 0b00001000
+ copyrightIdStart = (newValue[3] & 0b00000100) == 0b00000100
+ aacFrameLength = UInt16(newValue[3] & 0b00000011) << 11 | UInt16(newValue[4]) << 3 | UInt16(newValue[5] >> 5)
+ // 11 bits: low 5 bits of byte 5, high 6 bits of byte 6.
+ bufferFullness = UInt16(newValue[5] & 0b00011111) << 6 | UInt16(newValue[6] >> 2)
+ aacFrames = newValue[6] & 0b00000011
+ }
+ }
+}
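
The setter above unpacks the 7-byte fixed header bit by bit; makeFormatDescription then lifts it into a CMFormatDescription. An illustrative parse (byte values chosen to decode as MPEG-4, no CRC, AAC LC, 48 kHz, stereo, aacFrameLength 11):

    import Foundation

    let header = ADTSHeader(data: Data([0xFF, 0xF1, 0x4C, 0x80, 0x01, 0x7F, 0xFC]))
    // profile 1 (AAC LC), sampleFrequencyIndex 3 (48 kHz), 2 channels.
    if let format = header.makeFormatDescription() {
        print(format)
    }
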
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/AudioCodec.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/AudioCodec.swift
new file mode 100644
index 000000000..4e261efc9
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/AudioCodec.swift
@@ -0,0 +1,216 @@
+import AVFoundation
+
+/// The AudioCodec translates audio data into another format.
+/// - seealso: https://developer.apple.com/library/ios/technotes/tn2236/_index.html
+final class AudioCodec {
+ static let defaultFrameCapacity: UInt32 = 1024
+ static let defaultInputBuffersCursor = 0
+
+ var settings: AudioCodecSettings = .default {
+ didSet {
+ if settings.invalidateConverter(oldValue) {
+ inputFormat = nil
+ } else {
+ settings.apply(audioConverter, oldValue: oldValue)
+ }
+ }
+ }
+
+ var outputFormat: AVAudioFormat? {
+ return audioConverter?.outputFormat
+ }
+
+ @AsyncStreamedFlow
+ var outputStream: AsyncStream<(AVAudioBuffer, AVAudioTime)>
+
+ /// Indicates whether this instance is running (true) or not (false).
+ private(set) var isRunning = false
+ private(set) var inputFormat: AVAudioFormat? {
+ didSet {
+ guard inputFormat != oldValue else {
+ return
+ }
+ inputBuffers.removeAll()
+ inputBuffersCursor = Self.defaultInputBuffersCursor
+ outputBuffers.removeAll()
+ audioConverter = makeAudioConverter()
+ // … (input-buffer priming loop and several stored properties of
+ // AudioCodec were lost in extraction; the signature below is
+ // reconstructed from its surviving body.) …
+ }
+ }
+
+ private func makeInputBuffer() -> AVAudioBuffer? {
+ guard let inputFormat else {
+ return nil
+ }
+ switch inputFormat.formatDescription.mediaSubType {
+ case .linearPCM:
+ let buffer = AVAudioPCMBuffer(pcmFormat: inputFormat, frameCapacity: Self.defaultFrameCapacity)
+ buffer?.frameLength = Self.defaultFrameCapacity
+ return buffer
+ default:
+ return AVAudioCompressedBuffer(format: inputFormat, packetCapacity: 1, maximumPacketSize: 1024)
+ }
+ }
+
+ private func makeAudioConverter() -> AVAudioConverter? {
+ guard
+ let inputFormat,
+ let outputFormat = settings.format.makeOutputAudioFormat(inputFormat, sampleRate: settings.sampleRate, channelMap: settings.channelMap) else {
+ return nil
+ }
+ let converter = AVAudioConverter(from: inputFormat, to: outputFormat)
+ settings.apply(converter, oldValue: nil)
+ if inputFormat.formatDescription.mediaSubType == .linearPCM {
+ ringBuffer = AudioRingBuffer(inputFormat)
+ }
+ if self.outputFormat?.sampleRate != outputFormat.sampleRate {
+ audioTime.reset()
+ }
+ if logger.isEnabledFor(level: .info) {
+ logger.info("converter:", converter ?? "nil", ",inputFormat:", inputFormat, ",outputFormat:", outputFormat)
+ }
+ return converter
+ }
+}
+
+extension AudioCodec: Codec {
+ // MARK: Codec
+ typealias Buffer = AVAudioBuffer
+
+ var outputBuffer: AVAudioBuffer {
+ guard let outputFormat = audioConverter?.outputFormat else {
+ return .init()
+ }
+ if outputBuffers.isEmpty {
+ // … (the rest of AudioCodec.swift and the diff header for
+ // AudioCodecSettings.swift were lost in extraction; the methods from
+ // here to the end of this hunk belong to AudioCodecSettings.Format
+ // and AudioCodecSettings. The makeSampleRate signature below is
+ // reconstructed from its surviving body.) …
+
+ func makeSampleRate(_ input: Float64, output: Float64) -> Float64 {
+ let sampleRate = output == 0 ? input : output
+ guard let supportedSampleRate else {
+ return sampleRate
+ }
+ return supportedSampleRate.sorted { pow($0 - sampleRate, 2) < pow($1 - sampleRate, 2) }.first ?? sampleRate
+ }
+
+ func makeFramesPerPacket(_ sampleRate: Double) -> UInt32 {
+ switch self {
+ case .aac:
+ return 1024
+ case .opus:
+ // https://www.rfc-editor.org/rfc/rfc6716#section-2.1.4
+ let frameDurationSec = 0.02
+ return UInt32(sampleRate * frameDurationSec)
+ case .pcm:
+ return 1
+ }
+ }
+
+ func makeAudioBuffer(_ format: AVAudioFormat) -> AVAudioBuffer? {
+ switch self {
+ case .aac:
+ return AVAudioCompressedBuffer(format: format, packetCapacity: 1, maximumPacketSize: 1024 * Int(format.channelCount))
+ case .opus:
+ return AVAudioCompressedBuffer(format: format, packetCapacity: 1, maximumPacketSize: 1024 * Int(format.channelCount))
+ case .pcm:
+ return AVAudioPCMBuffer(pcmFormat: format, frameCapacity: 1024)
+ }
+ }
+
+ func makeOutputAudioFormat(_ format: AVAudioFormat, sampleRate: Float64, channelMap: [Int]?) -> AVAudioFormat? {
+ let channelCount: UInt32
+ if let channelMap {
+ channelCount = UInt32(channelMap.count)
+ } else {
+ channelCount = format.channelCount
+ }
+ let mSampleRate = makeSampleRate(format.sampleRate, output: sampleRate)
+ let config = AudioSpecificConfig.ChannelConfiguration(channelCount: channelCount)
+ var streamDescription = AudioStreamBasicDescription(
+ mSampleRate: mSampleRate,
+ mFormatID: formatID,
+ mFormatFlags: formatFlags,
+ mBytesPerPacket: bytesPerPacket,
+ mFramesPerPacket: makeFramesPerPacket(mSampleRate),
+ mBytesPerFrame: bytesPerFrame,
+ mChannelsPerFrame: min(
+ config?.channelCount ?? format.channelCount,
+ AudioCodecSettings.maximumNumberOfChannels
+ ),
+ mBitsPerChannel: bitsPerChannel,
+ mReserved: 0
+ )
+ return AVAudioFormat(
+ streamDescription: &streamDescription,
+ channelLayout: config?.audioChannelLayout
+ )
+ }
+ }
+
+ /// Specifies the bitRate of audio output.
+ public var bitRate: Int
+
+ /// Specifies whether to downmix the channels.
+ public var downmix: Bool
+
+ /// Specifies the mapping of output channels to input channels.
+ public var channelMap: [Int]?
+
+ /// Specifies the sample rate of audio output. A value of 0 keeps the sample rate of the main track source.
+ public let sampleRate: Float64
+
+ /// Specifies the output format.
+ public var format: AudioCodecSettings.Format = .aac
+
+ /// Creates a new instance.
+ public init(
+ bitRate: Int = AudioCodecSettings.defaultBitRate,
+ downmix: Bool = true,
+ channelMap: [Int]? = nil,
+ sampleRate: Float64 = 0,
+ format: AudioCodecSettings.Format = .aac
+ ) {
+ self.bitRate = bitRate
+ self.downmix = downmix
+ self.channelMap = channelMap
+ self.sampleRate = sampleRate
+ self.format = format
+ }
+
+ func apply(_ converter: AVAudioConverter?, oldValue: AudioCodecSettings?) {
+ guard let converter else {
+ return
+ }
+ if bitRate != oldValue?.bitRate {
+ let minAvailableBitRate = converter.applicableEncodeBitRates?.min(by: { a, b in
+ return a.intValue < b.intValue
+ })?.intValue ?? bitRate
+ let maxAvailableBitRate = converter.applicableEncodeBitRates?.max(by: { a, b in
+ return a.intValue < b.intValue
+ })?.intValue ?? bitRate
+ converter.bitRate = min(maxAvailableBitRate, max(minAvailableBitRate, bitRate))
+ }
+
+ if downmix != oldValue?.downmix {
+ converter.downmix = downmix
+ }
+
+ if channelMap != oldValue?.channelMap, let newChannelMap = validatedChannelMap(converter) {
+ converter.channelMap = newChannelMap
+ }
+ }
+
+ func invalidateConverter(_ rhs: AudioCodecSettings) -> Bool {
+ return !(format == rhs.format && channelMap == rhs.channelMap)
+ }
+
+ private func validatedChannelMap(_ converter: AVAudioConverter) -> [NSNumber]? {
+ guard let channelMap, channelMap.count == converter.outputFormat.channelCount else {
+ return nil
+ }
+ for inputChannel in channelMap where converter.inputFormat.channelCount <= inputChannel {
+ return nil
+ }
+ return channelMap.map { NSNumber(value: $0) }
+ }
+}
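
Given the public initializer above, configuring an Opus target is a one-liner; per makeFramesPerPacket, a 48 kHz Opus stream packs 48000 × 0.02 = 960 frames per packet. A sketch:

    // Sketch: Opus at 48 kHz. apply(_:oldValue:) above clamps bitRate to
    // the converter's applicable encode bit rates.
    let settings = AudioCodecSettings(
        bitRate: 64_000,
        downmix: true,
        channelMap: nil,
        sampleRate: 48_000,
        format: .opus
    )
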
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/Codec.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/Codec.swift
new file mode 100644
index 000000000..0ac41e115
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/Codec.swift
@@ -0,0 +1,9 @@
+import Foundation
+
+protocol Codec {
+ associatedtype Buffer
+
+ var outputBuffer: Buffer { get }
+
+ func releaseOutputBuffer(_ buffer: Buffer)
+}
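
AudioCodec's extension earlier in this hunk shows the real conformance; as a minimal sketch of the contract, here is a hypothetical codec whose buffers are plain Data:

    import Foundation

    // Hypothetical conformance: buffers are plain Data blobs.
    final class PassthroughCodec: Codec {
        typealias Buffer = Data
        var outputBuffer: Data { Data() }
        func releaseOutputBuffer(_ buffer: Data) {
            // A real codec would return the buffer to its pool here.
        }
    }
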
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VTSessionConvertible.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VTSessionConvertible.swift
new file mode 100644
index 000000000..9278c6eec
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VTSessionConvertible.swift
@@ -0,0 +1,30 @@
+import AVFoundation
+import Foundation
+import VideoToolbox
+
+enum VTSessionError: Swift.Error {
+ case failedToCreate(status: OSStatus)
+ case failedToPrepare(status: OSStatus)
+ case failedToConvert(status: OSStatus)
+}
+
+protocol VTSessionConvertible {
+ func setOption(_ option: VTSessionOption) -> OSStatus
+ func setOptions(_ options: Set<VTSessionOption>) -> OSStatus
+ func convert(_ sampleBuffer: CMSampleBuffer, continuation: AsyncStream<CMSampleBuffer>.Continuation?) throws
+ func invalidate()
+}
+
+extension VTSessionConvertible where Self: VTSession {
+ func setOption(_ option: VTSessionOption) -> OSStatus {
+ return VTSessionSetProperty(self, key: option.key.CFString, value: option.value)
+ }
+
+ func setOptions(_ options: Set<VTSessionOption>) -> OSStatus {
+ var properties: [AnyHashable: AnyObject] = [:]
+ for option in options {
+ properties[option.key.CFString] = option.value
+ }
+ return VTSessionSetProperties(self, propertyDictionary: properties as CFDictionary)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VTSessionMode.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VTSessionMode.swift
new file mode 100644
index 000000000..b47968f87
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VTSessionMode.swift
@@ -0,0 +1,59 @@
+import Foundation
+import VideoToolbox
+
+enum VTSessionMode {
+ case compression
+ case decompression
+
+ func makeSession(_ videoCodec: VideoCodec) throws -> any VTSessionConvertible {
+ switch self {
+ case .compression:
+ var session: VTCompressionSession?
+ var status = VTCompressionSessionCreate(
+ allocator: kCFAllocatorDefault,
+ width: Int32(videoCodec.settings.videoSize.width),
+ height: Int32(videoCodec.settings.videoSize.height),
+ codecType: videoCodec.settings.format.codecType,
+ encoderSpecification: videoCodec.settings.makeEncoderSpecification(),
+ imageBufferAttributes: videoCodec.makeImageBufferAttributes(.compression) as CFDictionary?,
+ compressedDataAllocator: nil,
+ outputCallback: nil,
+ refcon: nil,
+ compressionSessionOut: &session
+ )
+ guard status == noErr, let session else {
+ throw VTSessionError.failedToCreate(status: status)
+ }
+ status = session.setOptions(videoCodec.settings.makeOptions())
+ guard status == noErr else {
+ throw VTSessionError.failedToPrepare(status: status)
+ }
+ status = session.prepareToEncodeFrames()
+ guard status == noErr else {
+ throw VTSessionError.failedToPrepare(status: status)
+ }
+ if let expectedFrameRate = videoCodec.settings.expectedFrameRate {
+ status = session.setOption(.init(key: .expectedFrameRate, value: expectedFrameRate as CFNumber))
+ }
+ videoCodec.frameInterval = videoCodec.settings.frameInterval
+ return session
+ case .decompression:
+ guard let formatDescription = videoCodec.inputFormat else {
+ throw VTSessionError.failedToCreate(status: kVTParameterErr)
+ }
+ var session: VTDecompressionSession?
+ let status = VTDecompressionSessionCreate(
+ allocator: kCFAllocatorDefault,
+ formatDescription: formatDescription,
+ decoderSpecification: nil,
+ imageBufferAttributes: videoCodec.makeImageBufferAttributes(.decompression) as CFDictionary?,
+ outputCallback: nil,
+ decompressionSessionOut: &session
+ )
+ guard let session, status == noErr else {
+ throw VTSessionError.failedToCreate(status: status)
+ }
+ return session
+ }
+ }
+}
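
Callers wrap makeSession in do/catch to map the typed VTSessionError cases to diagnostics. A hedged sketch (`videoCodec` stands in for a configured VideoCodec, which is outside this excerpt):

    do {
        let session = try VTSessionMode.compression.makeSession(videoCodec)
        _ = session.setOption(.init(key: .realTime, value: kCFBooleanTrue))
    } catch VTSessionError.failedToCreate(let status) {
        logger.error("failed to create a session: \(status)")
    } catch {
        logger.error(error)
    }
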
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VTSessionOption.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VTSessionOption.swift
new file mode 100644
index 000000000..98801244e
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VTSessionOption.swift
@@ -0,0 +1,18 @@
+import Foundation
+
+/// A structure that represents a key-value pair for a VideoToolbox session option.
+struct VTSessionOption {
+ let key: VTSessionOptionKey
+ let value: AnyObject
+}
+
+extension VTSessionOption: Hashable {
+ // MARK: Hashable
+ static func == (lhs: VTSessionOption, rhs: VTSessionOption) -> Bool {
+ return lhs.key.CFString == rhs.key.CFString
+ }
+
+ func hash(into hasher: inout Hasher) {
+ return hasher.combine(key.CFString)
+ }
+}
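
Because equality and hashing consider only the key, a Set<VTSessionOption> holds at most one value per key, which is what setOptions in VTSessionConvertible relies on. A sketch using keys from VTSessionOptionKey below:

    import VideoToolbox

    // Sketch: one entry per key; inserting a second .realTime option
    // would be treated as a duplicate and dropped.
    let options: Set<VTSessionOption> = [
        .init(key: .realTime, value: kCFBooleanTrue),
        .init(key: .profileLevel, value: kVTProfileLevel_H264_Main_AutoLevel),
        .init(key: .averageBitRate, value: NSNumber(value: 2_000_000))
    ]
    // Applied in one call: _ = session.setOptions(options)
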
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VTSessionOptionKey.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VTSessionOptionKey.swift
new file mode 100644
index 000000000..57575a1c2
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VTSessionOptionKey.swift
@@ -0,0 +1,65 @@
+import Foundation
+import VideoToolbox
+
+struct VTSessionOptionKey: Codable, RawRepresentable {
+ typealias RawValue = String
+
+ static let depth = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_Depth as String)
+ static let profileLevel = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_ProfileLevel as String)
+ static let H264EntropyMode = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_H264EntropyMode as String)
+ static let numberOfPendingFrames = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_NumberOfPendingFrames as String)
+ static let pixelBufferPoolIsShared = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_PixelBufferPoolIsShared as String)
+ static let videoEncoderPixelBufferAttributes = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_VideoEncoderPixelBufferAttributes as String)
+ static let aspectRatio16x9 = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_AspectRatio16x9 as String)
+ static let cleanAperture = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_CleanAperture as String)
+ static let fieldCount = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_FieldCount as String)
+ static let fieldDetail = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_FieldDetail as String)
+ static let pixelAspectRatio = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_PixelAspectRatio as String)
+ static let progressiveScan = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_ProgressiveScan as String)
+ static let colorPrimaries = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_ColorPrimaries as String)
+ static let transferFunction = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_TransferFunction as String)
+ static let YCbCrMatrix = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_YCbCrMatrix as String)
+ static let ICCProfile = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_ICCProfile as String)
+ static let expectedDuration = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_ExpectedDuration as String)
+ static let expectedFrameRate = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_ExpectedFrameRate as String)
+ static let sourceFrameCount = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_SourceFrameCount as String)
+ static let allowFrameReordering = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_AllowFrameReordering as String)
+ static let allowTemporalCompression = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_AllowTemporalCompression as String)
+ static let maxKeyFrameInterval = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_MaxKeyFrameInterval as String)
+ static let maxKeyFrameIntervalDuration = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration as String)
+
+ #if os(macOS)
+ static let usingHardwareAcceleratedVideoEncoder = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_UsingHardwareAcceleratedVideoEncoder as String)
+ static let requireHardwareAcceleratedVideoEncoder = VTSessionOptionKey(rawValue: kVTVideoEncoderSpecification_RequireHardwareAcceleratedVideoEncoder as String)
+ static let enableHardwareAcceleratedVideoEncoder = VTSessionOptionKey(rawValue: kVTVideoEncoderSpecification_EnableHardwareAcceleratedVideoEncoder as String)
+ #endif
+
+ static let multiPassStorage = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_MultiPassStorage as String)
+ static let forceKeyFrame = VTSessionOptionKey(rawValue: kVTEncodeFrameOptionKey_ForceKeyFrame as String)
+ static let pixelTransferProperties = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_PixelTransferProperties as String)
+ static let averageBitRate = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_AverageBitRate as String)
+ static let dataRateLimits = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_DataRateLimits as String)
+ static let moreFramesAfterEnd = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_MoreFramesAfterEnd as String)
+ static let moreFramesBeforeStart = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_MoreFramesBeforeStart as String)
+ static let quality = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_Quality as String)
+ static let realTime = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_RealTime as String)
+ static let maxH264SliceBytes = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_MaxH264SliceBytes as String)
+ static let maxFrameDelayCount = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_MaxFrameDelayCount as String)
+ static let encoderID = VTSessionOptionKey(rawValue: kVTVideoEncoderSpecification_EncoderID as String)
+
+ @available(iOS 16.0, tvOS 16.0, macOS 13.0, *)
+ static let constantBitRate = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_ConstantBitRate as String)
+
+ @available(iOS 26.0, tvOS 26.0, macOS 26.0, *)
+ static let variableBitRate = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_VariableBitRate as String)
+
+ let rawValue: String
+
+ var CFString: CFString {
+ return rawValue as CFString
+ }
+
+ init(rawValue: String) {
+ self.rawValue = rawValue
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VideoCodec.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VideoCodec.swift
new file mode 100644
index 000000000..8dd08ae2f
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VideoCodec.swift
@@ -0,0 +1,174 @@
+import AVFoundation
+import CoreFoundation
+import VideoToolbox
+#if canImport(UIKit)
+import UIKit
+#endif
+
+final class VideoCodec {
+ static let frameInterval: Double = 0.0
+
+ var settings: VideoCodecSettings = .default {
+ didSet {
+ let invalidateSession = settings.invalidateSession(oldValue)
+ if invalidateSession {
+ self.invalidateSession = invalidateSession
+ } else {
+ settings.apply(self, rhs: oldValue)
+ }
+ }
+ }
+ var passthrough = true
+ var outputStream: AsyncStream<CMSampleBuffer> {
+ AsyncStream { continuation in
+ self.continuation = continuation
+ }
+ }
+ var frameInterval = VideoCodec.frameInterval
+ private var startedAt: CMTime = .zero
+ private var continuation: AsyncStream<CMSampleBuffer>.Continuation?
+ private var invalidateSession = true
+ private var presentationTimeStamp: CMTime = .zero
+ private(set) var isRunning = false
+ private(set) var inputFormat: CMFormatDescription? {
+ didSet {
+ guard inputFormat != oldValue else {
+ return
+ }
+ invalidateSession = true
+ outputFormat = nil
+ }
+ }
+ private(set) var session: (any VTSessionConvertible)? {
+ didSet {
+ oldValue?.invalidate()
+ invalidateSession = false
+ }
+ }
+ private(set) var outputFormat: CMFormatDescription?
+
+ func append(_ sampleBuffer: CMSampleBuffer) {
+ guard isRunning else {
+ return
+ }
+ do {
+ inputFormat = sampleBuffer.formatDescription
+ if invalidateSession {
+ if sampleBuffer.formatDescription?.isCompressed == true {
+ session = try VTSessionMode.decompression.makeSession(self)
+ } else {
+ session = try VTSessionMode.compression.makeSession(self)
+ }
+ }
+ guard let session, let continuation else {
+ return
+ }
+ if sampleBuffer.formatDescription?.isCompressed == true {
+ try session.convert(sampleBuffer, continuation: continuation)
+ } else {
+ if useFrame(sampleBuffer.presentationTimeStamp) {
+ try session.convert(sampleBuffer, continuation: continuation)
+ presentationTimeStamp = sampleBuffer.presentationTimeStamp
+ }
+ }
+ } catch {
+ logger.warn(error)
+ }
+ }
+
+ func makeImageBufferAttributes(_ mode: VTSessionMode) -> [NSString: AnyObject]? {
+ switch mode {
+ case .compression:
+ var attributes: [NSString: AnyObject] = [:]
+ if let inputFormat {
+ // Specify the pixel format of the uncompressed video.
+ attributes[kCVPixelBufferPixelFormatTypeKey] = inputFormat.mediaSubType.rawValue as CFNumber
+ }
+ return attributes.isEmpty ? nil : attributes
+ case .decompression:
+ return [
+ kCVPixelBufferIOSurfacePropertiesKey: NSDictionary(),
+ kCVPixelBufferMetalCompatibilityKey: kCFBooleanTrue
+ ]
+ }
+ }
+
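+ // Decides whether a raw frame should be encoded: frames captured before
+ // startedAt are dropped, the presentation timestamp must be strictly
+ // increasing, and when a frame interval is set, frames are throttled to
+ // roughly that interval.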
+ private func useFrame(_ presentationTimeStamp: CMTime) -> Bool {
+ guard startedAt <= presentationTimeStamp else {
+ return false
+ }
+ guard self.presentationTimeStamp < presentationTimeStamp else {
+ return false
+ }
+ guard Self.frameInterval < frameInterval else {
+ return true
+ }
+ return frameInterval <= presentationTimeStamp.seconds - self.presentationTimeStamp.seconds
+ }
+
+ #if os(iOS) || os(tvOS) || os(visionOS)
+ @objc
+ private func applicationWillEnterForeground(_ notification: Notification) {
+ invalidateSession = true
+ }
+
+ @objc
+ private func didAudioSessionInterruption(_ notification: Notification) {
+ guard
+ let userInfo: [AnyHashable: Any] = notification.userInfo,
+ let value: NSNumber = userInfo[AVAudioSessionInterruptionTypeKey] as? NSNumber,
+ let type = AVAudioSession.InterruptionType(rawValue: value.uintValue) else {
+ return
+ }
+ switch type {
+ case .ended:
+ invalidateSession = true
+ default:
+ break
+ }
+ }
+ #endif
+}
+
+extension VideoCodec: Runner {
+ // MARK: Running
+ func startRunning() {
+ guard !isRunning else {
+ return
+ }
+ #if os(iOS) || os(tvOS) || os(visionOS)
+ NotificationCenter.default.addObserver(
+ self,
+ selector: #selector(self.didAudioSessionInterruption),
+ name: AVAudioSession.interruptionNotification,
+ object: nil
+ )
+ NotificationCenter.default.addObserver(
+ self,
+ selector: #selector(self.applicationWillEnterForeground),
+ name: UIApplication.willEnterForegroundNotification,
+ object: nil
+ )
+ #endif
+ startedAt = passthrough ? .zero : CMClockGetTime(CMClockGetHostTimeClock())
+ isRunning = true
+ }
+
+ func stopRunning() {
+ guard isRunning else {
+ return
+ }
+ isRunning = false
+ session = nil
+ invalidateSession = true
+ inputFormat = nil
+ outputFormat = nil
+ presentationTimeStamp = .zero
+ continuation?.finish()
+ startedAt = .zero
+ #if os(iOS) || os(tvOS) || os(visionOS)
+ NotificationCenter.default.removeObserver(self, name: AVAudioSession.interruptionNotification, object: nil)
+ NotificationCenter.default.removeObserver(self, name: UIApplication.willEnterForegroundNotification, object: nil)
+ #endif
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VideoCodecSettings.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VideoCodecSettings.swift
new file mode 100644
index 000000000..9e4bbf167
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VideoCodecSettings.swift
@@ -0,0 +1,229 @@
+import Foundation
+import VideoToolbox
+
+/// Constraints on the video codec compression settings.
+public struct VideoCodecSettings: Codable, Sendable {
+ /// The frame interval for 30 fps.
+ public static let frameInterval30 = (1 / 30) - 0.001
+ /// The frame interval for 10 fps.
+ public static let frameInterval10 = (1 / 10) - 0.001
+ /// The frame interval for 5 fps.
+ public static let frameInterval05 = (1 / 05) - 0.001
+ /// The frame interval for 1 fps.
+ public static let frameInterval01 = (1 / 01) - 0.001
+
+ /// The default value.
+ public static let `default` = VideoCodecSettings()
+
+ /// A bitRate mode that affects how the video source is encoded.
+ public struct BitRateMode: Sendable, CustomStringConvertible, Codable, Hashable, Equatable {
+ public static func == (lhs: VideoCodecSettings.BitRateMode, rhs: VideoCodecSettings.BitRateMode) -> Bool {
+ lhs.key == rhs.key
+ }
+
+ /// The average bit rate.
+ public static let average = BitRateMode(key: .averageBitRate)
+
+ /// The constant bit rate.
+ @available(iOS 16.0, tvOS 16.0, macOS 13.0, *)
+ public static let constant = BitRateMode(key: .constantBitRate)
+
+ /// The variable bit rate.
+ /// - seealso: [kVTCompressionPropertyKey_VariableBitRate](https://developer.apple.com/documentation/videotoolbox/kvtcompressionpropertykey_variablebitrate)
+ @available(iOS 26.0, tvOS 26.0, macOS 26.0, *)
+ public static let variable = BitRateMode(key: .variableBitRate)
+
+ let key: VTSessionOptionKey
+
+ public var description: String {
+ key.CFString as String
+ }
+
+ public func hash(into hasher: inout Hasher) {
+ return hasher.combine(description)
+ }
+ }
+
+ /**
+ * The scaling mode.
+ * - seealso: https://developer.apple.com/documentation/videotoolbox/kvtpixeltransferpropertykey_scalingmode
+ * - seealso: https://developer.apple.com/documentation/videotoolbox/vtpixeltransfersession/pixel_transfer_properties/scaling_mode_constants
+ */
+ public enum ScalingMode: String, Codable, Sendable {
+ /// kVTScalingMode_Normal
+ case normal = "Normal"
+ /// kVTScalingMode_Letterbox
+ case letterbox = "Letterbox"
+ /// kVTScalingMode_CropSourceToCleanAperture
+ case cropSourceToCleanAperture = "CropSourceToCleanAperture"
+ /// kVTScalingMode_Trim
+ case trim = "Trim"
+ }
+
+ /// The video formats that the VideoCodec supports.
+ package enum Format: Codable, Sendable, CaseIterable {
+ case h264
+ case hevc
+
+ #if os(macOS)
+ var encoderID: NSString {
+ switch self {
+ case .h264:
+ #if arch(arm64)
+ return NSString(string: "com.apple.videotoolbox.videoencoder.ave.avc")
+ #else
+ return NSString(string: "com.apple.videotoolbox.videoencoder.h264.gva")
+ #endif
+ case .hevc:
+ return NSString(string: "com.apple.videotoolbox.videoencoder.ave.hevc")
+ }
+ }
+ #endif
+
+ var codecType: UInt32 {
+ switch self {
+ case .h264:
+ return kCMVideoCodecType_H264
+ case .hevc:
+ return kCMVideoCodecType_HEVC
+ }
+ }
+ }
+
+ /// Specifies the video size of the encoded video.
+ public var videoSize: CGSize
+ /// Specifies the bitrate.
+ public var bitRate: Int
+ /// Specifies the H264 profileLevel.
+ public var profileLevel: String {
+ didSet {
+ if profileLevel.contains("HEVC") {
+ format = .hevc
+ } else {
+ format = .h264
+ }
+ }
+ }
+ /// Specifies the scalingMode.
+ public var scalingMode: ScalingMode
+ /// Specifies the bitRateMode.
+ public var bitRateMode: BitRateMode
+ /// Specifies the keyframeInterval.
+ public var maxKeyFrameIntervalDuration: Int32
+ /// Specifies the allowFrameReordering.
+ public var allowFrameReordering: Bool? // swiftlint:disable:this discouraged_optional_boolean
+ /// Specifies the dataRateLimits.
+ public var dataRateLimits: [Double]?
+ /// Specifies the low-latency operation for an encoder.
+ public var isLowLatencyRateControlEnabled: Bool
+ /// Specifies whether the hardware-accelerated encoder is enabled (true) or not (false) on macOS.
+ public var isHardwareAcceleratedEnabled: Bool
+ /// Specifies the video frame interval.
+ public var frameInterval: Double = 0.0
+ /// Specifies the expected frame rate for an encoder. It may optimize power consumption.
+ public var expectedFrameRate: Double?
+
+ package var format: Format = .h264
+
+ /// Creates a new VideoCodecSettings instance.
+ public init(
+ videoSize: CGSize = .init(width: 854, height: 480),
+ bitRate: Int = 640 * 1000,
+ profileLevel: String = kVTProfileLevel_H264_Baseline_3_1 as String,
+ scalingMode: ScalingMode = .trim,
+ bitRateMode: BitRateMode = .average,
+ maxKeyFrameIntervalDuration: Int32 = 2,
+ // swiftlint:disable discouraged_optional_boolean
+ allowFrameReordering: Bool? = nil,
+ // swiftlint:enable discouraged_optional_boolean
+ dataRateLimits: [Double]? = [0.0, 0.0],
+ isLowLatencyRateControlEnabled: Bool = false,
+ isHardwareAcceleratedEnabled: Bool = true,
+ expectedFrameRate: Double? = nil
+ ) {
+ self.videoSize = videoSize
+ self.bitRate = bitRate
+ self.profileLevel = profileLevel
+ self.scalingMode = scalingMode
+ self.bitRateMode = bitRateMode
+ self.maxKeyFrameIntervalDuration = maxKeyFrameIntervalDuration
+ self.allowFrameReordering = allowFrameReordering
+ self.dataRateLimits = dataRateLimits
+ self.isLowLatencyRateControlEnabled = isLowLatencyRateControlEnabled
+ self.isHardwareAcceleratedEnabled = isHardwareAcceleratedEnabled
+ self.expectedFrameRate = expectedFrameRate
+ if profileLevel.contains("HEVC") {
+ self.format = .hevc
+ }
+ }
+
+ func invalidateSession(_ rhs: VideoCodecSettings) -> Bool {
+ return !(videoSize == rhs.videoSize &&
+ maxKeyFrameIntervalDuration == rhs.maxKeyFrameIntervalDuration &&
+ scalingMode == rhs.scalingMode &&
+ allowFrameReordering == rhs.allowFrameReordering &&
+ bitRateMode == rhs.bitRateMode &&
+ profileLevel == rhs.profileLevel &&
+ dataRateLimits == rhs.dataRateLimits &&
+ isLowLatencyRateControlEnabled == rhs.isLowLatencyRateControlEnabled &&
+ isHardwareAcceleratedEnabled == rhs.isHardwareAcceleratedEnabled
+ )
+ }
+
+ func apply(_ codec: VideoCodec, rhs: VideoCodecSettings) {
+ if bitRate != rhs.bitRate {
+ logger.info("bitRate change from ", rhs.bitRate, " to ", bitRate)
+ let option = VTSessionOption(key: bitRateMode.key, value: NSNumber(value: bitRate))
+ _ = codec.session?.setOption(option)
+ }
+ if frameInterval != rhs.frameInterval {
+ codec.frameInterval = frameInterval
+ }
+ if expectedFrameRate != rhs.expectedFrameRate {
+ let value = if let expectedFrameRate { expectedFrameRate } else { 0.0 }
+ let option = VTSessionOption(key: .expectedFrameRate, value: value as CFNumber)
+ _ = codec.session?.setOption(option)
+ }
+ }
+
+ // https://developer.apple.com/documentation/videotoolbox/encoding_video_for_live_streaming
+ func makeOptions() -> Set<VTSessionOption> {
+ let isBaseline = profileLevel.contains("Baseline")
+ var options = Set<VTSessionOption>([
+ .init(key: .realTime, value: kCFBooleanTrue),
+ .init(key: .profileLevel, value: profileLevel as NSObject),
+ .init(key: bitRateMode.key, value: NSNumber(value: bitRate)),
+ .init(key: .maxKeyFrameIntervalDuration, value: NSNumber(value: maxKeyFrameIntervalDuration)),
+ .init(key: .allowFrameReordering, value: (allowFrameReordering ?? !isBaseline) as NSObject),
+ .init(key: .pixelTransferProperties, value: [
+ "ScalingMode": scalingMode.rawValue
+ ] as NSObject)
+ ])
+ if bitRateMode == .average {
+ if let dataRateLimits, dataRateLimits.count == 2 {
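+ // A 0 entry means "derive a default": the first element caps the data
+ // rate at roughly 1.5x the average bit rate in bytes per second, the
+ // second is the window in seconds over which the cap applies.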
+ var limits = [Double](repeating: 0.0, count: 2)
+ limits[0] = dataRateLimits[0] == 0 ? Double(bitRate) / 8 * 1.5 : dataRateLimits[0]
+ limits[1] = dataRateLimits[1] == 0 ? Double(1.0) : dataRateLimits[1]
+ options.insert(.init(key: .dataRateLimits, value: limits as NSArray))
+ }
+ }
+ #if os(macOS)
+ if isHardwareAcceleratedEnabled {
+ options.insert(.init(key: .encoderID, value: format.encoderID))
+ options.insert(.init(key: .enableHardwareAcceleratedVideoEncoder, value: kCFBooleanTrue))
+ options.insert(.init(key: .requireHardwareAcceleratedVideoEncoder, value: kCFBooleanTrue))
+ }
+ #endif
+ if !isBaseline && profileLevel.contains("H264") {
+ options.insert(.init(key: .H264EntropyMode, value: kVTH264EntropyMode_CABAC))
+ }
+ return options
+ }
+
+ func makeEncoderSpecification() -> CFDictionary? {
+ if isLowLatencyRateControlEnabled {
+ return [kVTVideoEncoderSpecification_EnableLowLatencyRateControl: true as CFBoolean] as CFDictionary
+ }
+ return nil
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/Localization/ja.lproj/index.md b/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/Localization/ja.lproj/index.md
new file mode 100644
index 000000000..80f3d2e4a
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/Localization/ja.lproj/index.md
@@ -0,0 +1,86 @@
+# ``HaishinKit``
+This is the main module.
+
+## 🔍 Overview
+Provides camera and microphone mixing functionality required for live streaming.
+It also offers common processing across each module.
+
+### Module Structure
+|Module|Description|
+|:-|:-|
+|HaishinKit|This module.|
+|RTMPHaishinKit|Provides the RTMP protocol stack.|
+|SRTHaishinKit|Provides the SRT protocol stack.|
+|RTCHaishinKit|Provides the WebRTC WHEP/WHIP protocol stack. Currently in alpha.|
+|MoQTHaishinKit|Provides the MoQT protocol stack. Currently in alpha.|
+
+## 🎨 Features
+The following features are available:
+- Live Mixing
+ - [Video Mixing](doc://HaishinKit/videomixing)
+ - Treats camera video and still images as a single stream source.
+ - Audio Mixing
+ - Combines different microphone audio sources into a single audio stream source.
+- Session
+ - Provides a unified API for protocols such as RTMP, SRT, WHEP, and WHIP.
+
+## 📖 Usage
+### Live Mixing
+```swift
+let mixer = MediaMixer()
+
+Task {
+ do {
+ // Attaches the microphone device.
+ try await mixer.attachAudio(AVCaptureDevice.default(for: .audio))
+ } catch {
+ print(error)
+ }
+
+ do {
+ // Attaches the camera device.
+ try await mixer.attachVideo(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back))
+ } catch {
+ print(error)
+ }
+
+ // Associates the stream object with the MediaMixer.
+ await mixer.addOutput(stream)
+ await mixer.startRunning()
+}
+```
+
+### Session API
+Provides a unified API for implementing RTMP and SRT clients. Retry handling is also performed internally by the API.
+
+#### Preparation
+```swift
+import HaishinKit
+import RTMPHaishinKit
+import SRTHaishinKit
+
+Task {
+ await SessionBuilderFactory.shared.register(RTMPSessionFactory())
+ await SessionBuilderFactory.shared.register(SRTSessionFactory())
+}
+```
+
+#### Make Session
+```swift
+let session = try await SessionBuilderFactory.shared.make(URL(string: "rtmp://hostname/live/live"))
+ .setMode(.ingest)
+ .build()
+```
+```swift
+let session = try await SessionBuilderFactory.shared.make(URL(string: "srt://hostname:448?stream=xxxxx"))
+ .setMode(.playback)
+ .build()
+```
+
+#### Connecting
+Used for publishing or playback.
+```swift
+try session.connect {
+ print("on disconnected")
+}
+```
+
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/faq.md b/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/faq.md
new file mode 100644
index 000000000..9e4b4fe29
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/faq.md
@@ -0,0 +1,12 @@
+# FAQ
+Frequently Asked Questions and Answers from a Technical Perspective.
+
+## Q. Is it possible to use a UVC camera?
+Yes. Starting with iPadOS 17.0, UVC cameras are available through [the OS API](https://developer.apple.com/documentation/avfoundation/avcapturedevice/devicetype-swift.struct/external). Operation on iPhone (iOS) has not been confirmed.
+```swift
+if #available(iOS 17.0, *) {
+ let camera = AVCaptureDevice.default(.external, for: .video, position: .unspecified)
+ try? await mixer.attachVideo(camera, track: 0)
+}
+```
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/index.md b/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/index.md
new file mode 100644
index 000000000..d1174d38c
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/index.md
@@ -0,0 +1,90 @@
+# ``HaishinKit``
+This is the main module.
+
+## 🔍 Overview
+Provides camera and microphone mixing functionality required for live streaming.
+It also offers common processing across each module.
+
+### Module Structure
+| Module | Description |
+|:-|:-|
+| HaishinKit | This module. |
+| RTMPHaishinKit | Provides the RTMP protocol stack. |
+| SRTHaishinKit | Provides the SRT protocol stack. |
+| RTCHaishinKit | Provides the WebRTC WHEP/WHIP protocol stack. Currently in alpha. |
+| MoQTHaishinKit | Provides the MoQT protocol stack. Currently in alpha. |
+
+## 🎨 Features
+The following features are available:
+- Live Mixing
+ - [Video Mixing](doc://HaishinKit/videomixing)
+ - Treats camera video and still images as a single stream source.
+ - Audio Mixing
+ - Combines different microphone audio sources into a single audio stream source.
+- Session
+ - Provides a unified API for protocols such as RTMP, SRT, WHEP, and WHIP.
+
+## 📖 Usage
+### Live Mixing
+```swift
+let mixer = MediaMixer()
+
+Task {
+ do {
+ // Attaches the microphone device.
+ try await mixer.attachAudio(AVCaptureDevice.default(for: .audio))
+ } catch {
+ print(error)
+ }
+
+ do {
+ // Attaches the camera device.
+ try await mixer.attachVideo(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back))
+ } catch {
+ print(error)
+ }
+
+ // Associates the stream object with the MediaMixer.
+ await mixer.addOutput(stream)
+ await mixer.startRunning()
+}
+```
+
+### Session API
+Provides a unified API for implementing RTMP and SRT clients. Retry handling is also performed internally by the API.
+
+#### Preparation
+```swift
+import HaishinKit
+import RTMPHaishinKit
+import SRTHaishinKit
+
+Task {
+ await SessionBuilderFactory.shared.register(RTMPSessionFactory())
+ await SessionBuilderFactory.shared.register(SRTSessionFactory())
+}
+```
+
+#### Make Session
+**RTMP**
+Provide the RTMP connection URL combined with the stream name.
+```swift
+let session = try await SessionBuilderFactory.shared.make(URL(string: "rtmp://hostname/appName/stramName"))
+ .setMode(.publish)
+ .build()
+```
+**SRT**
+```swift
+let session = try await SessionBuilderFactory.shared.make(URL(string: "srt://hostname:448?stream=xxxxx"))
+ .setMode(.playback)
+ .build()
+```
+
+#### Connecting
+Used for publishing or playback.
+```swift
+try session.connect {
+ print("on disconnected")
+}
+```
+
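+Registered factories are matched against the URL, so the WHEP/WHIP stack should slot into the same builder. The snippet below is a hypothetical sketch: `RTCSessionFactory` and the `whip://` scheme are assumptions about the alpha RTCHaishinKit module, not confirmed API.
+```swift
+import RTCHaishinKit
+
+Task {
+    // Hypothetical factory name; RTCHaishinKit is in alpha and its API may differ.
+    await SessionBuilderFactory.shared.register(RTCSessionFactory())
+    let session = try await SessionBuilderFactory.shared.make(URL(string: "whip://example.com/live/whip"))
+        .setMode(.ingest)
+        .build()
+}
+```
+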
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/known-issue.md b/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/known-issue.md
new file mode 100644
index 000000000..c71a607d5
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/known-issue.md
@@ -0,0 +1,17 @@
+# Known issue
+
+## 🔍 Overview
+This section lists known issues that cannot be resolved within HaishinKit. It mainly summarizes problems that occur during development with Xcode.
+
+### When Debugging with Xcode
+The following issues may occur while developing with Xcode connected.
+
+#### Application Freezes on Launch
+When `MediaMixer#startRunning()` is executed while the app is launched from Xcode, the application may freeze.
+It has been confirmed that this does not occur when the application is force-quit and then relaunched.
+- iOS 18, Xcode 16: the issue is still present in the latest versions.
+
+#### Freeze When Starting Recording
+When `StreamRecorder#startRecording()` is executed while the app is launched from Xcode, the application may freeze.
+It has been confirmed that this does not occur when the application is force-quit and then relaunched.
+- iOS 18, Xcode 16: the issue is still present in the latest versions.
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/videomixing.md b/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/videomixing.md
new file mode 100644
index 000000000..eba80de09
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/videomixing.md
@@ -0,0 +1,57 @@
+# Video mixing
+HaishinKit provides APIs for overlaying still images on camera footage and for embedding text. These features are collectively referred to as [ScreenObjects](https://docs.haishinkit.com/swift/latest/documentation/haishinkit/screenobject).
+Filtering with CIFilter is also supported; for use cases such as applying a mosaic effect to camera footage, CIFilter is the recommended approach.
+
+## Usage
+Here is an overview of how to use the typical ScreenObject objects.
+
+### ImageScreenObject
+An example of compositing images.
+```swift
+let imageScreenObject = ImageScreenObject()
+let imageURL = URL(fileURLWithPath: Bundle.main.path(forResource: "game_jikkyou", ofType: "png") ?? "")
+if let provider = CGDataProvider(url: imageURL as CFURL) {
+ imageScreenObject.verticalAlignment = .bottom
+ imageScreenObject.layoutMargin = .init(top: 0, left: 0, bottom: 16, right: 0)
+ imageScreenObject.cgImage = CGImage(
+ pngDataProviderSource: provider,
+ decode: nil,
+ shouldInterpolate: false,
+ intent: .defaultIntent
+ )
+} else {
+ print("no image")
+}
+
+try? await mixer.screen.addChild(imageScreenObject)
+```
+
+### VideoTrackScreenObject
+There may be situations where you want to capture the scenery with the rear camera while showing your facial expression with the front camera.
+
+First, set up the cameras as follows. Make sure to remember the track numbers, as they will be used later.
+```swift
+Task {
+ let back = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)
+ try? await mixer.attachVideo(back, track: 0)
+ let front = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
+ try? await mixer.attachVideo(front, track: 1)
+}
+```
+
+Track number 0 is designed to be rendered across the entire screen. In this case, we are specifying where to render track number 1.
+
+```swift
+Task { @ScreenActor in
+ let videoScreenObject = VideoTrackScreenObject()
+ videoScreenObject.cornerRadius = 32.0
+ videoScreenObject.track = 1
+ videoScreenObject.horizontalAlignment = .right
+ videoScreenObject.layoutMargin = .init(top: 16, left: 0, bottom: 0, right: 16)
+ videoScreenObject.size = .init(width: 160 * 2, height: 90 * 2)
+ // You can add a CIFilter-based filter using the registerVideoEffect API.
+ _ = videoScreenObject.registerVideoEffect(MonochromeEffect())
+
+ try? await mixer.screen.addChild(videoScreenObject)
+}
+```
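+
+### Defining a video effect
+The `MonochromeEffect` used above is not shipped with the library. Below is a minimal sketch of such an effect, assuming HaishinKit's `VideoEffect` base class with an overridable `execute(_:)` that takes and returns a `CIImage`; the filter parameters are illustrative.
+```swift
+import CoreImage
+
+final class MonochromeEffect: VideoEffect {
+    private let filter = CIFilter(name: "CIColorMonochrome")
+
+    override func execute(_ image: CIImage) -> CIImage {
+        guard let filter else {
+            return image
+        }
+        // Run the frame through the filter; fall back to the unmodified
+        // frame if the filter produces no output.
+        filter.setValue(image, forKey: kCIInputImageKey)
+        filter.setValue(CIColor(red: 0.75, green: 0.75, blue: 0.75), forKey: kCIInputColorKey)
+        filter.setValue(1.0, forKey: kCIInputIntensityKey)
+        return filter.outputImage ?? image
+    }
+}
+```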
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVAudioBuffer+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVAudioBuffer+Extension.swift
new file mode 100644
index 000000000..aa74d7f1a
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVAudioBuffer+Extension.swift
@@ -0,0 +1,7 @@
+import AVFoundation
+
+#if hasAttribute(retroactive)
+extension AVAudioBuffer: @retroactive @unchecked Sendable {}
+#else
+extension AVAudioBuffer: @unchecked Sendable {}
+#endif
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVAudioCompressedBuffer+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVAudioCompressedBuffer+Extension.swift
new file mode 100644
index 000000000..923c597fd
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVAudioCompressedBuffer+Extension.swift
@@ -0,0 +1,32 @@
+import AVFoundation
+import Foundation
+
+extension AVAudioCompressedBuffer {
+ @discardableResult
+ @inline(__always)
+ final func copy(_ buffer: AVAudioBuffer) -> Bool {
+ guard let buffer = buffer as? AVAudioCompressedBuffer else {
+ return false
+ }
+ if let packetDescriptions = buffer.packetDescriptions {
+ self.packetDescriptions?.pointee = packetDescriptions.pointee
+ }
+ packetCount = buffer.packetCount
+ byteLength = buffer.byteLength
+ data.copyMemory(from: buffer.data, byteCount: Int(buffer.byteLength))
+ return true
+ }
+
+ package func encode(to data: inout Data) {
+ guard let config = AudioSpecificConfig(formatDescription: format.formatDescription) else {
+ return
+ }
+ config.encode(to: &data, length: Int(byteLength))
+ data.withUnsafeMutableBytes {
+ guard let baseAddress = $0.baseAddress else {
+ return
+ }
+ memcpy(baseAddress.advanced(by: AudioSpecificConfig.adtsHeaderSize), self.data, Int(self.byteLength))
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVAudioPCMBuffer+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVAudioPCMBuffer+Extension.swift
new file mode 100644
index 000000000..3fc3f3f01
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVAudioPCMBuffer+Extension.swift
@@ -0,0 +1,117 @@
+import Accelerate
+import AVFoundation
+
+extension AVAudioPCMBuffer {
+ final func makeSampleBuffer(_ when: AVAudioTime) -> CMSampleBuffer? {
+ var status: OSStatus = noErr
+ var sampleBuffer: CMSampleBuffer?
+ status = CMAudioSampleBufferCreateWithPacketDescriptions(
+ allocator: nil,
+ dataBuffer: nil,
+ dataReady: false,
+ makeDataReadyCallback: nil,
+ refcon: nil,
+ formatDescription: format.formatDescription,
+ sampleCount: Int(frameLength),
+ presentationTimeStamp: when.makeTime(),
+ packetDescriptions: nil,
+ sampleBufferOut: &sampleBuffer
+ )
+ guard let sampleBuffer else {
+ logger.warn("CMAudioSampleBufferCreateWithPacketDescriptions returned error: ", status)
+ return nil
+ }
+ status = CMSampleBufferSetDataBufferFromAudioBufferList(
+ sampleBuffer,
+ blockBufferAllocator: kCFAllocatorDefault,
+ blockBufferMemoryAllocator: kCFAllocatorDefault,
+ flags: 0,
+ bufferList: audioBufferList
+ )
+ if status != noErr {
+ logger.warn("CMSampleBufferSetDataBufferFromAudioBufferList returned error: ", status)
+ }
+ return sampleBuffer
+ }
+
+ @discardableResult
+ @inlinable
+ final func copy(_ audioBuffer: AVAudioBuffer) -> Bool {
+ guard let audioBuffer = audioBuffer as? AVAudioPCMBuffer, frameLength == audioBuffer.frameLength else {
+ return false
+ }
+ let numSamples = Int(frameLength)
+ if format.isInterleaved {
+ let channelCount = Int(format.channelCount)
+ switch format.commonFormat {
+ case .pcmFormatInt16:
+ memcpy(int16ChannelData?[0], audioBuffer.int16ChannelData?[0], numSamples * channelCount * 2)
+ case .pcmFormatInt32:
+ memcpy(int32ChannelData?[0], audioBuffer.int32ChannelData?[0], numSamples * channelCount * 4)
+ case .pcmFormatFloat32:
+ memcpy(floatChannelData?[0], audioBuffer.floatChannelData?[0], numSamples * channelCount * 4)
+ default:
+ break
+ }
+ } else {
+ for i in 0..<Int(format.channelCount) {
+ switch format.commonFormat {
+ case .pcmFormatInt16:
+ memcpy(int16ChannelData?[i], audioBuffer.int16ChannelData?[i], numSamples * 2)
+ case .pcmFormatInt32:
+ memcpy(int32ChannelData?[i], audioBuffer.int32ChannelData?[i], numSamples * 4)
+ case .pcmFormatFloat32:
+ memcpy(floatChannelData?[i], audioBuffer.floatChannelData?[i], numSamples * 4)
+ default:
+ break
+ }
+ }
+ }
+ return true
+ }
+
+ final func makeCopy() -> AVAudioPCMBuffer? {
+ guard let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: frameCapacity) else {
+ return nil
+ }
+ buffer.frameLength = frameLength
+ buffer.copy(self)
+ return buffer
+ }
+
+ @discardableResult
+ @inlinable
+ final func muted(_ isMuted: Bool) -> AVAudioPCMBuffer {
+ guard isMuted else {
+ return self
+ }
+ let numSamples = Int(frameLength)
+ if format.isInterleaved {
+ let channelCount = Int(format.channelCount)
+ switch format.commonFormat {
+ case .pcmFormatInt16:
+ int16ChannelData?[0].update(repeating: 0, count: numSamples * channelCount)
+ case .pcmFormatInt32:
+ int32ChannelData?[0].update(repeating: 0, count: numSamples * channelCount)
+ case .pcmFormatFloat32:
+ floatChannelData?[0].update(repeating: 0, count: numSamples * channelCount)
+ default:
+ break
+ }
+ } else {
+ for i in 0..<Int(format.channelCount) {
+ switch format.commonFormat {
+ case .pcmFormatInt16:
+ int16ChannelData?[i].update(repeating: 0, count: numSamples)
+ case .pcmFormatInt32:
+ int32ChannelData?[i].update(repeating: 0, count: numSamples)
+ case .pcmFormatFloat32:
+ floatChannelData?[i].update(repeating: 0, count: numSamples)
+ default:
+ break
+ }
+ }
+ }
+ return self
+ }
+}
+
+extension AVAudioTime {
+ func makeTime() -> CMTime {
+ return .init(seconds: AVAudioTime.seconds(forHostTime: hostTime), preferredTimescale: 1000000000)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVCaptureDevice+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVCaptureDevice+Extension.swift
new file mode 100644
index 000000000..827c020f2
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVCaptureDevice+Extension.swift
@@ -0,0 +1,21 @@
+import AVFoundation
+import Foundation
+
+@available(tvOS 17.0, *)
+extension AVCaptureDevice {
+ func videoFormat(width: Int32, height: Int32, frameRate: Float64, isMultiCamSupported: Bool) -> AVCaptureDevice.Format? {
+ if isMultiCamSupported {
+ return formats.first {
+ $0.isMultiCamSupported && $0.isFrameRateSupported(frameRate) && width <= $0.formatDescription.dimensions.width && height <= $0.formatDescription.dimensions.height
+ } ?? formats.last {
+ $0.isMultiCamSupported && $0.isFrameRateSupported(frameRate) && $0.formatDescription.dimensions.width < width && $0.formatDescription.dimensions.height < height
+ }
+ } else {
+ return formats.first {
+ $0.isFrameRateSupported(frameRate) && width <= $0.formatDescription.dimensions.width && height <= $0.formatDescription.dimensions.height
+ } ?? formats.last {
+ $0.isFrameRateSupported(frameRate) && $0.formatDescription.dimensions.width < width && $0.formatDescription.dimensions.height < height
+ }
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVCaptureDevice.Format+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVCaptureDevice.Format+Extension.swift
new file mode 100644
index 000000000..b8c4826a4
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVCaptureDevice.Format+Extension.swift
@@ -0,0 +1,42 @@
+import AVFoundation
+import Foundation
+
+#if os(macOS)
+extension AVCaptureDevice.Format {
+ var isMultiCamSupported: Bool {
+ return true
+ }
+}
+#elseif os(visionOS)
+extension AVCaptureDevice.Format {
+ var isMultiCamSupported: Bool {
+ return false
+ }
+}
+#endif
+
+@available(tvOS 17.0, *)
+extension AVCaptureDevice.Format {
+ func isFrameRateSupported(_ frameRate: Float64) -> Bool {
+ var durations: [CMTime] = []
+ var frameRates: [Float64] = []
+ for range in videoSupportedFrameRateRanges {
+ if range.minFrameRate == range.maxFrameRate {
+ durations.append(range.minFrameDuration)
+ frameRates.append(range.maxFrameRate)
+ continue
+ }
+ if range.contains(frameRate: frameRate) {
+ return true
+ }
+ return false
+ }
+ let diff = frameRates.map { abs($0 - frameRate) }
+ if let minElement = diff.min() {
+ for i in 0..<diff.count where diff[i] == minElement {
+ return frameRates[i] == frameRate
+ }
+ }
+ return false
+ }
+}
+
+@available(tvOS 17.0, *)
+extension AVFrameRateRange {
+ func clamp(rate: Float64) -> Float64 {
+ max(minFrameRate, min(maxFrameRate, rate))
+ }
+
+ func contains(frameRate: Float64) -> Bool {
+ (minFrameRate...maxFrameRate) ~= frameRate
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVLayerVideoGravity+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVLayerVideoGravity+Extension.swift
new file mode 100644
index 000000000..3ee238d1f
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVLayerVideoGravity+Extension.swift
@@ -0,0 +1,34 @@
+import AVFoundation
+import Foundation
+
+extension AVLayerVideoGravity {
+ func scale(_ display: CGSize, image: CGSize) -> CGAffineTransform {
+ switch self {
+ case .resize:
+ return .init(scaleX: display.width / image.width, y: display.height / image.height)
+ case .resizeAspect:
+ let scale = min(display.width / image.width, display.height / image.height)
+ return .init(scaleX: scale, y: scale)
+ case .resizeAspectFill:
+ let scale = max(display.width / image.width, display.height / image.height)
+ return .init(scaleX: scale, y: scale)
+ default:
+ return .init(scaleX: 1.0, y: 1.0)
+ }
+ }
+
+ func region(_ display: CGRect, image: CGRect) -> CGRect {
+ switch self {
+ case .resize:
+ return image
+ case .resizeAspect:
+ return image
+ case .resizeAspectFill:
+ let x = abs(display.width - image.width) / 2
+ let y = abs(display.height - image.height) / 2
+ return .init(origin: .init(x: x, y: y), size: display.size)
+ default:
+ return image
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CGImage+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CGImage+Extension.swift
new file mode 100644
index 000000000..77f9ff2af
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CGImage+Extension.swift
@@ -0,0 +1,8 @@
+import CoreGraphics
+import Foundation
+
+extension CGImage {
+ var size: CGSize {
+ return .init(width: width, height: height)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CMSampleBuffer+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CMSampleBuffer+Extension.swift
new file mode 100644
index 000000000..0ee87509e
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CMSampleBuffer+Extension.swift
@@ -0,0 +1,26 @@
+import Accelerate
+import AVFoundation
+import CoreMedia
+
+package extension CMSampleBuffer {
+ @inlinable @inline(__always) var isNotSync: Bool {
+ get {
+ guard !sampleAttachments.isEmpty else {
+ return false
+ }
+ return sampleAttachments[0][.notSync] != nil
+ }
+ set {
+ guard !sampleAttachments.isEmpty else {
+ return
+ }
+ sampleAttachments[0][.notSync] = newValue ? 1 : nil
+ }
+ }
+}
+
+#if hasAttribute(retroactive)
+extension CMSampleBuffer: @retroactive @unchecked Sendable {}
+#else
+extension CMSampleBuffer: @unchecked Sendable {}
+#endif
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CMTime+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CMTime+Extension.swift
new file mode 100644
index 000000000..40388f327
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CMTime+Extension.swift
@@ -0,0 +1,15 @@
+import AVFoundation
+import Foundation
+
+extension CMTime {
+ func makeAudioTime() -> AVAudioTime {
+ return .init(sampleTime: value, atRate: Double(timescale))
+ }
+
+ func convertTime(from: CMClock?, to: CMClock? = CMClockGetHostTimeClock()) -> CMTime {
+ guard let from, let to else {
+ return self
+ }
+ return from.convertTime(self, to: to)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CMVideoDimention+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CMVideoDimention+Extension.swift
new file mode 100644
index 000000000..525ba839a
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CMVideoDimention+Extension.swift
@@ -0,0 +1,8 @@
+import AVFoundation
+import Foundation
+
+extension CMVideoDimensions {
+ var size: CGSize {
+ return .init(width: CGFloat(width), height: CGFloat(height))
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CMVideoFormatDescription+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CMVideoFormatDescription+Extension.swift
new file mode 100644
index 000000000..db55a514c
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CMVideoFormatDescription+Extension.swift
@@ -0,0 +1,83 @@
+import CoreImage
+import CoreMedia
+
+extension CMVideoFormatDescription {
+ package var isCompressed: Bool {
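+ // The media subtype of uncompressed video is a CoreVideo pixel format;
+ // any subtype outside this list is treated as a compressed codec type
+ // (H.264, HEVC, and so on).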
+ switch CMFormatDescriptionGetMediaSubType(self) {
+ case kCVPixelFormatType_1Monochrome,
+ kCVPixelFormatType_2Indexed,
+ kCVPixelFormatType_8Indexed,
+ kCVPixelFormatType_1IndexedGray_WhiteIsZero,
+ kCVPixelFormatType_2IndexedGray_WhiteIsZero,
+ kCVPixelFormatType_4IndexedGray_WhiteIsZero,
+ kCVPixelFormatType_8IndexedGray_WhiteIsZero,
+ kCVPixelFormatType_16BE555,
+ kCVPixelFormatType_16LE555,
+ kCVPixelFormatType_16LE5551,
+ kCVPixelFormatType_16BE565,
+ kCVPixelFormatType_16LE565,
+ kCVPixelFormatType_24RGB,
+ kCVPixelFormatType_24BGR,
+ kCVPixelFormatType_32ARGB,
+ kCVPixelFormatType_32BGRA,
+ kCVPixelFormatType_32ABGR,
+ kCVPixelFormatType_32RGBA,
+ kCVPixelFormatType_64ARGB,
+ kCVPixelFormatType_48RGB,
+ kCVPixelFormatType_32AlphaGray,
+ kCVPixelFormatType_16Gray,
+ kCVPixelFormatType_30RGB,
+ kCVPixelFormatType_422YpCbCr8,
+ kCVPixelFormatType_4444YpCbCrA8,
+ kCVPixelFormatType_4444YpCbCrA8R,
+ kCVPixelFormatType_4444AYpCbCr8,
+ kCVPixelFormatType_4444AYpCbCr16,
+ kCVPixelFormatType_444YpCbCr8,
+ kCVPixelFormatType_422YpCbCr16,
+ kCVPixelFormatType_422YpCbCr10,
+ kCVPixelFormatType_444YpCbCr10,
+ kCVPixelFormatType_420YpCbCr8Planar,
+ kCVPixelFormatType_420YpCbCr8PlanarFullRange,
+ kCVPixelFormatType_422YpCbCr_4A_8BiPlanar,
+ kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
+ kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
+ kCVPixelFormatType_422YpCbCr8_yuvs,
+ kCVPixelFormatType_422YpCbCr8FullRange,
+ kCVPixelFormatType_OneComponent8,
+ kCVPixelFormatType_TwoComponent8,
+ kCVPixelFormatType_OneComponent16Half,
+ kCVPixelFormatType_OneComponent32Float,
+ kCVPixelFormatType_TwoComponent16Half,
+ kCVPixelFormatType_TwoComponent32Float,
+ kCVPixelFormatType_64RGBAHalf,
+ kCVPixelFormatType_128RGBAFloat,
+ kCVPixelFormatType_Lossy_32BGRA,
+ kCVPixelFormatType_Lossless_32BGRA,
+ kCVPixelFormatType_Lossy_420YpCbCr8BiPlanarFullRange,
+ kCVPixelFormatType_Lossy_420YpCbCr8BiPlanarVideoRange,
+ kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarFullRange,
+ kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarVideoRange,
+ kCVPixelFormatType_Lossy_420YpCbCr10PackedBiPlanarVideoRange,
+ kCVPixelFormatType_Lossy_422YpCbCr10PackedBiPlanarVideoRange,
+ kCVPixelFormatType_Lossless_420YpCbCr10PackedBiPlanarVideoRange,
+ kCVPixelFormatType_Lossless_422YpCbCr10PackedBiPlanarVideoRange:
+ return false
+ default:
+ return true
+ }
+ }
+
+ var configurationBox: Data? {
+ guard let atoms = CMFormatDescriptionGetExtension(self, extensionKey: kCMFormatDescriptionExtension_SampleDescriptionExtensionAtoms) as? NSDictionary else {
+ return nil
+ }
+ switch mediaSubType {
+ case .h264:
+ return atoms["avcC"] as? Data
+ case .hevc:
+ return atoms["hvcC"] as? Data
+ default:
+ return nil
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CVPixelBuffer+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CVPixelBuffer+Extension.swift
new file mode 100644
index 000000000..bf04dfae2
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CVPixelBuffer+Extension.swift
@@ -0,0 +1,155 @@
+import Accelerate
+import CoreImage
+import CoreVideo
+import Foundation
+
+extension CVPixelBuffer {
+ enum Error: Swift.Error {
+ case failedToLock(_ status: CVReturn)
+ case failedToUnlock(_ status: CVReturn)
+ case unsupportedFormat(_ format: OSType)
+ }
+
+ static let lockFlags = CVPixelBufferLockFlags(rawValue: .zero)
+
+ @inlinable @inline(__always)
+ var size: CGSize {
+ return .init(width: CVPixelBufferGetWidth(self), height: CVPixelBufferGetHeight(self))
+ }
+
+ @inlinable @inline(__always)
+ var dataSize: Int {
+ CVPixelBufferGetDataSize(self)
+ }
+
+ @inlinable @inline(__always)
+ var pixelFormatType: OSType {
+ CVPixelBufferGetPixelFormatType(self)
+ }
+
+ @inlinable @inline(__always)
+ var baseAddress: UnsafeMutableRawPointer? {
+ CVPixelBufferGetBaseAddress(self)
+ }
+
+ @inlinable @inline(__always)
+ var planeCount: Int {
+ CVPixelBufferGetPlaneCount(self)
+ }
+
+ @inlinable @inline(__always)
+ var bytesPerRow: Int {
+ CVPixelBufferGetBytesPerRow(self)
+ }
+
+ @inlinable @inline(__always)
+ var width: Int {
+ CVPixelBufferGetWidth(self)
+ }
+
+ @inlinable @inline(__always)
+ var height: Int {
+ CVPixelBufferGetHeight(self)
+ }
+
+ @inlinable @inline(__always)
+ var formatType: OSType {
+ CVPixelBufferGetPixelFormatType(self)
+ }
+
+ @inline(__always)
+ func copy(_ pixelBuffer: CVPixelBuffer?) throws {
+ // https://stackoverflow.com/questions/53132611/copy-a-cvpixelbuffer-on-any-ios-device
+ try pixelBuffer?.mutate(.readOnly) { pixelBuffer in
+ if planeCount == 0 {
+ let dst = self.baseAddress
+ let src = pixelBuffer.baseAddress
+ let bytesPerRowSrc = pixelBuffer.bytesPerRow
+ let bytesPerRowDst = self.bytesPerRow
+ if bytesPerRowSrc == bytesPerRowDst {
+ memcpy(dst, src, height * bytesPerRowSrc)
+ } else {
+ var startOfRowSrc = src
+ var startOfRowDst = dst
+ for _ in 0..<height {
+ memcpy(startOfRowDst, startOfRowSrc, min(bytesPerRowSrc, bytesPerRowDst))
+ startOfRowSrc = startOfRowSrc?.advanced(by: bytesPerRowSrc)
+ startOfRowDst = startOfRowDst?.advanced(by: bytesPerRowDst)
+ }
+ }
+ } else {
+ for i in 0..<planeCount {
+ memcpy(
+ baseAddressOfPlane(i),
+ pixelBuffer.baseAddressOfPlane(i),
+ pixelBuffer.getHeightOfPlane(i) * pixelBuffer.bytesPerRawOfPlane(i)
+ )
+ }
+ }
+ }
+ }
+
+ @inline(__always)
+ func lockBaseAddress(_ lockFlags: CVPixelBufferLockFlags = CVPixelBuffer.lockFlags) throws {
+ let status = CVPixelBufferLockBaseAddress(self, lockFlags)
+ guard status == kCVReturnSuccess else {
+ throw Error.failedToLock(status)
+ }
+ }
+
+ @inline(__always)
+ func unlockBaseAddress(_ lockFlags: CVPixelBufferLockFlags = CVPixelBuffer.lockFlags) throws {
+ let status = CVPixelBufferUnlockBaseAddress(self, lockFlags)
+ guard status == kCVReturnSuccess else {
+ throw Error.failedToUnlock(status)
+ }
+ }
+
+ func makeCIImage() throws -> CIImage {
+ try lockBaseAddress(.readOnly)
+ let result = CIImage(cvPixelBuffer: self)
+ try unlockBaseAddress(.readOnly)
+ return result
+ }
+
+ @inline(__always)
+ func mutate(_ lockFlags: CVPixelBufferLockFlags, lambda: (CVPixelBuffer) throws -> Void) throws {
+ let status = CVPixelBufferLockBaseAddress(self, lockFlags)
+ guard status == kCVReturnSuccess else {
+ throw Error.failedToLock(status)
+ }
+ defer {
+ CVPixelBufferUnlockBaseAddress(self, lockFlags)
+ }
+ try lambda(self)
+ }
+
+ @inlinable
+ @inline(__always)
+ func baseAddressOfPlane(_ index: Int) -> UnsafeMutableRawPointer? {
+ CVPixelBufferGetBaseAddressOfPlane(self, index)
+ }
+
+ @inlinable
+ @inline(__always)
+ func getHeightOfPlane(_ index: Int) -> Int {
+ CVPixelBufferGetHeightOfPlane(self, index)
+ }
+
+ @inlinable
+ @inline(__always)
+ func bytesPerRawOfPlane(_ index: Int) -> Int {
+ CVPixelBufferGetBytesPerRowOfPlane(self, index)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CVPixelBufferPool+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CVPixelBufferPool+Extension.swift
new file mode 100644
index 000000000..b81862008
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CVPixelBufferPool+Extension.swift
@@ -0,0 +1,13 @@
+import CoreVideo
+import Foundation
+
+extension CVPixelBufferPool {
+ @discardableResult
+ func createPixelBuffer(_ pixelBuffer: UnsafeMutablePointer<CVPixelBuffer?>) -> CVReturn {
+ return CVPixelBufferPoolCreatePixelBuffer(
+ kCFAllocatorDefault,
+ self,
+ pixelBuffer
+ )
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/Data+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/Data+Extension.swift
new file mode 100644
index 000000000..dd67e5b6c
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/Data+Extension.swift
@@ -0,0 +1,46 @@
+import CoreMedia
+import Foundation
+
+package extension Data {
+ var bytes: [UInt8] {
+ withUnsafeBytes {
+ guard let pointer = $0.baseAddress?.assumingMemoryBound(to: UInt8.self) else {
+ return []
+ }
+ return [UInt8](UnsafeBufferPointer(start: pointer, count: count))
+ }
+ }
+
+ func makeBlockBuffer(advancedBy: Int = 0) -> CMBlockBuffer? {
+ var blockBuffer: CMBlockBuffer?
+ let length = count - advancedBy
+ return withUnsafeBytes { (buffer: UnsafeRawBufferPointer) -> CMBlockBuffer? in
+ guard let baseAddress = buffer.baseAddress else {
+ return nil
+ }
+ guard CMBlockBufferCreateWithMemoryBlock(
+ allocator: kCFAllocatorDefault,
+ memoryBlock: nil,
+ blockLength: length,
+ blockAllocator: nil,
+ customBlockSource: nil,
+ offsetToData: 0,
+ dataLength: length,
+ flags: 0,
+ blockBufferOut: &blockBuffer) == noErr else {
+ return nil
+ }
+ guard let blockBuffer else {
+ return nil
+ }
+ guard CMBlockBufferReplaceDataBytes(
+ with: baseAddress.advanced(by: advancedBy),
+ blockBuffer: blockBuffer,
+ offsetIntoDestination: 0,
+ dataLength: length) == noErr else {
+ return nil
+ }
+ return blockBuffer
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/ExpressibleByIntegerLiteral+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/ExpressibleByIntegerLiteral+Extension.swift
new file mode 100644
index 000000000..832146f83
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/ExpressibleByIntegerLiteral+Extension.swift
@@ -0,0 +1,24 @@
+import Foundation
+
+package extension ExpressibleByIntegerLiteral {
+ var data: Data {
+ return withUnsafePointer(to: self) { value in
+ return Data(bytes: UnsafeRawPointer(value), count: MemoryLayout<Self>.size)
+ }
+ }
+
+ init(data: Data) {
+ let diff: Int = MemoryLayout<Self>.size - data.count
+ if 0 < diff {
+ var buffer = Data(repeating: 0, count: diff)
+ buffer.append(data)
+ self = buffer.withUnsafeBytes { $0.baseAddress!.assumingMemoryBound(to: Self.self).pointee }
+ return
+ }
+ self = data.withUnsafeBytes { $0.baseAddress!.assumingMemoryBound(to: Self.self).pointee }
+ }
+
+ init(data: Slice<Data>) {
+ self.init(data: Data(data))
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/Mirror+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/Mirror+Extension.swift
new file mode 100644
index 000000000..e33150df9
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/Mirror+Extension.swift
@@ -0,0 +1,22 @@
+import Foundation
+
+package extension Mirror {
+ var debugDescription: String {
+ var data: [String] = []
+ if let superclassMirror = superclassMirror {
+ for child in superclassMirror.children {
+ guard let label = child.label else {
+ continue
+ }
+ data.append("\(label): \(child.value)")
+ }
+ }
+ for child in children {
+ guard let label = child.label else {
+ continue
+ }
+ data.append("\(label): \(child.value)")
+ }
+ return "\(subjectType){\(data.joined(separator: ","))}"
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/VTCompressionSession+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/VTCompressionSession+Extension.swift
new file mode 100644
index 000000000..9f2c2f256
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/VTCompressionSession+Extension.swift
@@ -0,0 +1,38 @@
+import Foundation
+import VideoToolbox
+
+extension VTCompressionSession {
+ func prepareToEncodeFrames() -> OSStatus {
+ VTCompressionSessionPrepareToEncodeFrames(self)
+ }
+}
+
+extension VTCompressionSession: VTSessionConvertible {
+ @inline(__always)
+ func convert(_ sampleBuffer: CMSampleBuffer, continuation: AsyncStream<CMSampleBuffer>.Continuation?) throws {
+ guard let imageBuffer = sampleBuffer.imageBuffer else {
+ return
+ }
+ var flags: VTEncodeInfoFlags = []
+ let status = VTCompressionSessionEncodeFrame(
+ self,
+ imageBuffer: imageBuffer,
+ presentationTimeStamp: sampleBuffer.presentationTimeStamp,
+ duration: sampleBuffer.duration,
+ frameProperties: nil,
+ infoFlagsOut: &flags,
+ outputHandler: { _, _, sampleBuffer in
+ if let sampleBuffer {
+ continuation?.yield(sampleBuffer)
+ }
+ }
+ )
+ if status != noErr {
+ throw VTSessionError.failedToConvert(status: status)
+ }
+ }
+
+ func invalidate() {
+ VTCompressionSessionInvalidate(self)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/VTDecompressionSession+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/VTDecompressionSession+Extension.swift
new file mode 100644
index 000000000..6f7b6cc50
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/VTDecompressionSession+Extension.swift
@@ -0,0 +1,62 @@
+import Foundation
+import VideoToolbox
+
+extension VTDecompressionSession: VTSessionConvertible {
+ static let defaultDecodeFlags: VTDecodeFrameFlags = [
+ ._EnableAsynchronousDecompression,
+ ._EnableTemporalProcessing
+ ]
+
+ @inline(__always)
+ func convert(_ sampleBuffer: CMSampleBuffer, continuation: AsyncStream<CMSampleBuffer>.Continuation?) throws {
+ var flagsOut: VTDecodeInfoFlags = []
+ var _: VTEncodeInfoFlags = []
+ let status = VTDecompressionSessionDecodeFrame(
+ self,
+ sampleBuffer: sampleBuffer,
+ flags: Self.defaultDecodeFlags,
+ infoFlagsOut: &flagsOut,
+ outputHandler: { status, _, imageBuffer, presentationTimeStamp, duration in
+ guard let imageBuffer else {
+ return
+ }
+ var status = noErr
+ var outputFormat: CMFormatDescription?
+ status = CMVideoFormatDescriptionCreateForImageBuffer(
+ allocator: kCFAllocatorDefault,
+ imageBuffer: imageBuffer,
+ formatDescriptionOut: &outputFormat
+ )
+ guard let outputFormat, status == noErr else {
+ return
+ }
+ var timingInfo = CMSampleTimingInfo(
+ duration: duration,
+ presentationTimeStamp: presentationTimeStamp,
+ decodeTimeStamp: .invalid
+ )
+ var sampleBuffer: CMSampleBuffer?
+ status = CMSampleBufferCreateForImageBuffer(
+ allocator: kCFAllocatorDefault,
+ imageBuffer: imageBuffer,
+ dataReady: true,
+ makeDataReadyCallback: nil,
+ refcon: nil,
+ formatDescription: outputFormat,
+ sampleTiming: &timingInfo,
+ sampleBufferOut: &sampleBuffer
+ )
+ if let sampleBuffer {
+ continuation?.yield(sampleBuffer)
+ }
+ }
+ )
+ if status != noErr {
+ throw VTSessionError.failedToConvert(status: status)
+ }
+ }
+
+ func invalidate() {
+ VTDecompressionSessionInvalidate(self)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/vImage_Buffer+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/vImage_Buffer+Extension.swift
new file mode 100644
index 000000000..424884e2e
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/vImage_Buffer+Extension.swift
@@ -0,0 +1,36 @@
+import Accelerate
+import CoreMedia
+import CoreVideo
+import Foundation
+
+extension vImage_Buffer {
+ init?(height: vImagePixelCount, width: vImagePixelCount, pixelBits: UInt32, flags: vImage_Flags) {
+ self.init()
+ guard vImageBuffer_Init(
+ &self,
+ height,
+ width,
+ pixelBits,
+ flags) == kvImageNoError else {
+ return nil
+ }
+ }
+
+ @discardableResult
+ mutating func copy(to cvPixelBuffer: CVPixelBuffer, format: inout vImage_CGImageFormat) -> vImage_Error {
+ let cvImageFormat = vImageCVImageFormat_CreateWithCVPixelBuffer(cvPixelBuffer).takeRetainedValue()
+ vImageCVImageFormat_SetColorSpace(cvImageFormat, CGColorSpaceCreateDeviceRGB())
+ defer {
+ if let dictionary = CVBufferCopyAttachments(cvPixelBuffer, .shouldNotPropagate) {
+ CVBufferSetAttachments(cvPixelBuffer, dictionary, .shouldPropagate)
+ }
+ }
+ return vImageBuffer_CopyToCVPixelBuffer(
+ &self,
+ &format,
+ cvPixelBuffer,
+ cvImageFormat,
+ nil,
+ vImage_Flags(kvImageNoFlags))
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/ISO/AudioSpecificConfig.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/ISO/AudioSpecificConfig.swift
new file mode 100644
index 000000000..abf27929b
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/ISO/AudioSpecificConfig.swift
@@ -0,0 +1,299 @@
+import AVFoundation
+
+/**
+ The Audio Specific Config is the global header for MPEG-4 Audio
+ - seealso: http://wiki.multimedia.cx/index.php?title=MPEG-4_Audio#Audio_Specific_Config
+ - seealso: http://wiki.multimedia.cx/?title=Understanding_AAC
+ */
+package struct AudioSpecificConfig: Equatable {
+ package static let adtsHeaderSize: Int = 7
+
+ package enum AudioObjectType: UInt8 {
+ case unknown = 0
+ case aacMain = 1
+ case aacLc = 2
+ case aacSsr = 3
+ case aacLtp = 4
+ case aacSbr = 5
+ case aacScalable = 6
+ case twinqVQ = 7
+ case celp = 8
+ case hxvc = 9
+
+ init?(objectID: MPEG4ObjectID?) {
+ switch objectID {
+ case .aac_Main?:
+ self = .aacMain
+ case .AAC_LC?:
+ self = .aacLc
+ case .AAC_SSR?:
+ self = .aacSsr
+ case .AAC_LTP?:
+ self = .aacLtp
+ case .AAC_SBR?:
+ self = .aacSbr
+ case .aac_Scalable?:
+ self = .aacScalable
+ case .twinVQ?:
+ self = .twinqVQ
+ case .CELP?:
+ self = .celp
+ case .HVXC?:
+ self = .hxvc
+ case .none:
+ return nil
+ @unknown default:
+ return nil
+ }
+ }
+ }
+
+ enum SamplingFrequency: UInt8 {
+ case hz96000 = 0
+ case hz88200 = 1
+ case hz64000 = 2
+ case hz48000 = 3
+ case hz44100 = 4
+ case hz32000 = 5
+ case hz24000 = 6
+ case hz22050 = 7
+ case hz16000 = 8
+ case hz12000 = 9
+ case hz11025 = 10
+ case hz8000 = 11
+ case hz7350 = 12
+
+ var sampleRate: Float64 {
+ switch self {
+ case .hz96000:
+ return 96000
+ case .hz88200:
+ return 88200
+ case .hz64000:
+ return 64000
+ case .hz48000:
+ return 48000
+ case .hz44100:
+ return 44100
+ case .hz32000:
+ return 32000
+ case .hz24000:
+ return 24000
+ case .hz22050:
+ return 22050
+ case .hz16000:
+ return 16000
+ case .hz12000:
+ return 12000
+ case .hz11025:
+ return 11025
+ case .hz8000:
+ return 8000
+ case .hz7350:
+ return 7350
+ }
+ }
+
+ init?(sampleRate: Float64) {
+ switch Int(sampleRate) {
+ case 96000:
+ self = .hz96000
+ case 88200:
+ self = .hz88200
+ case 64000:
+ self = .hz64000
+ case 48000:
+ self = .hz48000
+ case 44100:
+ self = .hz44100
+ case 32000:
+ self = .hz32000
+ case 24000:
+ self = .hz24000
+ case 22050:
+ self = .hz22050
+ case 16000:
+ self = .hz16000
+ case 12000:
+ self = .hz12000
+ case 11025:
+ self = .hz11025
+ case 8000:
+ self = .hz8000
+ case 7350:
+ self = .hz7350
+ default:
+ return nil
+ }
+ }
+ }
+
+ enum ChannelConfiguration: UInt8 {
+ case definedInAOTSpecificConfig = 0
+ case frontCenter = 1
+ case frontLeftAndFrontRight = 2
+ case frontCenterAndFrontLeftAndFrontRight = 3
+ case frontCenterAndFrontLeftAndFrontRightAndBackCenter = 4
+ case frontCenterAndFrontLeftAndFrontRightAndBackLeftAndBackRight = 5
+ case frontCenterAndFrontLeftAndFrontRightAndBackLeftAndBackRightLFE = 6
+ case frontCenterAndFrontLeftAndFrontRightAndSideLeftAndSideRightAndBackLeftAndBackRightLFE = 7
+
+ var channelCount: UInt32 {
+ switch self {
+ case .definedInAOTSpecificConfig:
+ return 0
+ case .frontCenter:
+ return 1
+ case .frontLeftAndFrontRight:
+ return 2
+ case .frontCenterAndFrontLeftAndFrontRight:
+ return 3
+ case .frontCenterAndFrontLeftAndFrontRightAndBackCenter:
+ return 4
+ case .frontCenterAndFrontLeftAndFrontRightAndBackLeftAndBackRight:
+ return 5
+ case .frontCenterAndFrontLeftAndFrontRightAndBackLeftAndBackRightLFE:
+ return 6
+ case .frontCenterAndFrontLeftAndFrontRightAndSideLeftAndSideRightAndBackLeftAndBackRightLFE:
+ return 8
+ }
+ }
+
+ var audioChannelLayoutTag: AudioChannelLayoutTag? {
+ switch self {
+ case .definedInAOTSpecificConfig:
+ return nil
+ case .frontCenter:
+ return nil
+ case .frontLeftAndFrontRight:
+ return nil
+ case .frontCenterAndFrontLeftAndFrontRight:
+ return kAudioChannelLayoutTag_MPEG_3_0_B
+ case .frontCenterAndFrontLeftAndFrontRightAndBackCenter:
+ return kAudioChannelLayoutTag_MPEG_4_0_B
+ case .frontCenterAndFrontLeftAndFrontRightAndBackLeftAndBackRight:
+ return kAudioChannelLayoutTag_MPEG_5_0_D
+ case .frontCenterAndFrontLeftAndFrontRightAndBackLeftAndBackRightLFE:
+ return kAudioChannelLayoutTag_MPEG_5_1_D
+ case .frontCenterAndFrontLeftAndFrontRightAndSideLeftAndSideRightAndBackLeftAndBackRightLFE:
+ return kAudioChannelLayoutTag_MPEG_7_1_B
+ }
+ }
+
+ var audioChannelLayout: AVAudioChannelLayout? {
+ guard let audioChannelLayoutTag else {
+ return nil
+ }
+ return AVAudioChannelLayout(layoutTag: audioChannelLayoutTag)
+ }
+
+ init?(channelCount: UInt32) {
+ switch channelCount {
+ case 0:
+ self = .definedInAOTSpecificConfig
+ case 1:
+ self = .frontCenter
+ case 2:
+ self = .frontLeftAndFrontRight
+ case 3:
+ self = .frontCenterAndFrontLeftAndFrontRight
+ case 4:
+ self = .frontCenterAndFrontLeftAndFrontRightAndBackCenter
+ case 5:
+ self = .frontCenterAndFrontLeftAndFrontRightAndBackLeftAndBackRight
+ case 6:
+ self = .frontCenterAndFrontLeftAndFrontRightAndBackLeftAndBackRightLFE
+ case 8:
+ self = .frontCenterAndFrontLeftAndFrontRightAndSideLeftAndSideRightAndBackLeftAndBackRightLFE
+ default:
+ return nil
+ }
+ }
+ }
+
+ let type: AudioObjectType
+ let frequency: SamplingFrequency
+ let channelConfig: ChannelConfiguration
+ let frameLengthFlag = false
+
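+    /// The two ASC bytes pack the audio object type (5 bits), the sampling
+    /// frequency index (4 bits) and the channel configuration (4 bits).
+    /// For example, AAC-LC (2) at 44100 Hz (index 4) in stereo (2) packs to `[0x12, 0x10]`.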
+ package var bytes: [UInt8] {
+ var bytes = [UInt8](repeating: 0, count: 2)
+ bytes[0] = type.rawValue << 3 | (frequency.rawValue >> 1)
+ bytes[1] = (frequency.rawValue & 0x1) << 7 | (channelConfig.rawValue & 0xF) << 3
+ return bytes
+ }
+
+ package init?(bytes: [UInt8]) {
+ guard
+            2 <= bytes.count,
+            let type = AudioObjectType(rawValue: bytes[0] >> 3),
+ let frequency = SamplingFrequency(rawValue: (bytes[0] & 0b00000111) << 1 | (bytes[1] >> 7)),
+ let channel = ChannelConfiguration(rawValue: (bytes[1] & 0b01111000) >> 3) else {
+ return nil
+ }
+ self.type = type
+ self.frequency = frequency
+ self.channelConfig = channel
+ }
+
+ init(type: AudioObjectType, frequency: SamplingFrequency, channel: ChannelConfiguration) {
+ self.type = type
+ self.frequency = frequency
+ self.channelConfig = channel
+ }
+
+ package init?(formatDescription: CMFormatDescription?) {
+ guard
+ let streamDescription = formatDescription?.audioStreamBasicDescription,
+ let type = AudioObjectType(objectID: MPEG4ObjectID(rawValue: Int(streamDescription.mFormatFlags))),
+ let frequency = SamplingFrequency(sampleRate: streamDescription.mSampleRate),
+ let channelConfig = ChannelConfiguration(channelCount: streamDescription.mChannelsPerFrame) else {
+ return nil
+ }
+ self.type = type
+ self.frequency = frequency
+ self.channelConfig = channelConfig
+ }
+
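+    /// Writes the 7-byte ADTS header (syncword 0xFFF, MPEG-2 style, no CRC)
+    /// carrying the profile, sampling frequency index, channel configuration
+    /// and the 13-bit full frame length into the first bytes of `data`.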
+ func encode(to data: inout Data, length: Int) {
+ let fullSize: Int = Self.adtsHeaderSize + length
+ data[0] = 0xFF
+ data[1] = 0xF9
+ data[2] = (type.rawValue - 1) << 6 | (frequency.rawValue << 2) | (channelConfig.rawValue >> 2)
+ data[3] = (channelConfig.rawValue & 3) << 6 | UInt8(fullSize >> 11)
+ data[4] = UInt8((fullSize & 0x7FF) >> 3)
+ data[5] = ((UInt8(fullSize & 7)) << 5) + 0x1F
+ data[6] = 0xFC
+ }
+
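+    /// Builds an `AVAudioFormat` from the parsed config, attaching an explicit
+    /// channel layout when one exists for the channel configuration.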
+ package func makeAudioFormat() -> AVAudioFormat? {
+ var audioStreamBasicDescription = makeAudioStreamBasicDescription()
+ if let audioChannelLayoutTag = channelConfig.audioChannelLayoutTag {
+ return AVAudioFormat(
+ streamDescription: &audioStreamBasicDescription,
+ channelLayout: AVAudioChannelLayout(layoutTag: audioChannelLayoutTag)
+ )
+ }
+ return AVAudioFormat(streamDescription: &audioStreamBasicDescription)
+ }
+
+ private func makeAudioStreamBasicDescription() -> AudioStreamBasicDescription {
+ AudioStreamBasicDescription(
+ mSampleRate: frequency.sampleRate,
+ mFormatID: kAudioFormatMPEG4AAC,
+ mFormatFlags: UInt32(type.rawValue),
+ mBytesPerPacket: 0,
+ mFramesPerPacket: frameLengthFlag ? 960 : 1024,
+ mBytesPerFrame: 0,
+ mChannelsPerFrame: channelConfig.channelCount,
+ mBitsPerChannel: 0,
+ mReserved: 0
+ )
+ }
+}
+
+extension AudioSpecificConfig: CustomDebugStringConvertible {
+ // MARK: CustomDebugStringConvertible
+ package var debugDescription: String {
+ Mirror(reflecting: self).debugDescription
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/ISO/H264NALUnit.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/ISO/H264NALUnit.swift
new file mode 100644
index 000000000..e098110dc
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/ISO/H264NALUnit.swift
@@ -0,0 +1,42 @@
+import CoreMedia
+import Foundation
+
+package enum H264NALUnitType: UInt8, Equatable {
+ case unspec = 0
+ case slice = 1 // P frame
+ case dpa = 2
+ case dpb = 3
+ case dpc = 4
+ case idr = 5 // I frame
+ case sei = 6
+ case sps = 7
+ case pps = 8
+ case aud = 9
+ case eoseq = 10
+ case eostream = 11
+ case fill = 12
+}
+
+// MARK: -
+package struct H264NALUnit: NALUnit, Equatable {
+ package let refIdc: UInt8
+ package let type: H264NALUnitType
+ package let payload: Data
+
+ init(_ data: Data, length: Int) {
+ self.refIdc = data[0] >> 5
+ self.type = H264NALUnitType(rawValue: data[0] & 0x1f) ?? .unspec
+        self.payload = data.subdata(in: 1..<length)
+    }
+}
+
+// MARK: -
+package struct H265NALUnit: NALUnit, Equatable {
+    package let type: H265NALUnitType
+    package let temporalIdPlusOne: UInt8
+    package let payload: Data
+
+    init(_ data: Data, length: Int) {
+        self.type = H265NALUnitType(rawValue: (data[0] & 0x7E) >> 1) ?? .unspec
+        self.temporalIdPlusOne = data[1] & 0b00011111
+        self.payload = data.subdata(in: 2..<length)
+    }
+}
+
+// MARK: -
+package struct AVCFormatStream {
+    let data: Data
+
+    init(data: Data) {
+        self.data = data
+    }
+
+    package init?(bytes: UnsafePointer<UInt8>, count: UInt32) {
+ self.init(data: Data(bytes: bytes, count: Int(count)))
+ }
+
+ package init?(data: Data?) {
+ guard let data = data else {
+ return nil
+ }
+ self.init(data: data)
+ }
+
+ package func toByteStream() -> Data {
+ let buffer = ByteArray(data: data)
+ var result = Data()
+ while 0 < buffer.bytesAvailable {
+ do {
+ let length: Int = try Int(buffer.readUInt32())
+ result.append(contentsOf: [0x00, 0x00, 0x00, 0x01])
+ result.append(try buffer.readBytes(length))
+ } catch {
+ logger.error("\(buffer)")
+ }
+ }
+ return result
+ }
+
+ static package func toNALFileFormat(_ data: inout Data) {
+ var lastIndexOf = data.count - 1
+        for i in (2..<data.count).reversed() {
+            guard data[i] == 1, data[i - 1] == 0, data[i - 2] == 0, 0 <= i - 3, data[i - 3] == 0 else {
+                continue
+            }
+            // Rewrite each 4-byte Annex B start code as a big-endian NAL unit length.
+            data.replaceSubrange(i - 3...i, with: Int32(lastIndexOf - i).bigEndian.data)
+            lastIndexOf = i - 4
+        }
+    }
+}
+
+// MARK: -
+final class NALUnitReader {
+    static let defaultNALUnitHeaderLength: Int32 = 4
+
+    func read<T: NALUnit>(_ data: inout Data, type: T.Type) -> [T] {
+        var units: [T] = .init()
+        var lastIndexOf = data.count - 1
+        for i in (2..<data.count).reversed() {
+            guard data[i] == 1, data[i - 1] == 0, data[i - 2] == 0 else {
+                continue
+            }
+            let startCodeLength = 0 <= i - 3 && data[i - 3] == 0 ? 4 : 3
+            units.append(T.init(data.subdata(in: (i + 1)..<lastIndexOf + 1), length: lastIndexOf - i))
+            lastIndexOf = i - startCodeLength
+        }
+        return units
+    }
+
+    func makeNALUnits(_ buffer: CMSampleBuffer) -> [Data] {
+ var offset = 0
+ let header = Int(Self.defaultNALUnitHeaderLength)
+ let length = buffer.dataBuffer?.dataLength ?? 0
+ var result: [Data] = []
+
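+        // Prepend an access unit delimiter (NAL type 9): payload 0x10 marks an
+        // I-only access unit, 0x30 one that may also carry P slices. Sync samples
+        // additionally get their SPS/PPS parameter sets re-sent ahead of the slices.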
+ if !buffer.isNotSync {
+ if let formatDescription = buffer.formatDescription {
+ result.append(Data([0x09, 0x10]))
+ formatDescription.parameterSets.forEach {
+ result.append($0)
+ }
+ }
+ } else {
+ result.append(Data([0x09, 0x30]))
+ }
+
+ try? buffer.dataBuffer?.withUnsafeMutableBytes { buffer in
+ guard let baseAddress = buffer.baseAddress else {
+ return
+ }
+ while offset + header < length {
+ var nalUnitLength: UInt32 = 0
+ memcpy(&nalUnitLength, baseAddress + offset, header)
+ nalUnitLength = CFSwapInt32BigToHost(nalUnitLength)
+ let start = offset + header
+ let end = start + Int(nalUnitLength)
+ if end <= length {
+ result.append(Data(bytes: baseAddress + start, count: Int(nalUnitLength)))
+ } else {
+ break
+ }
+ offset = end
+ }
+ }
+ return result
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioCaptureUnit.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioCaptureUnit.swift
new file mode 100644
index 000000000..973dec798
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioCaptureUnit.swift
@@ -0,0 +1,148 @@
+import AVFoundation
+
+final class AudioCaptureUnit: CaptureUnit {
+ let lockQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.AudioCaptureUnit.lock")
+ var mixerSettings: AudioMixerSettings {
+ get {
+ audioMixer.settings
+ }
+ set {
+ audioMixer.settings = newValue
+ }
+ }
+ var isMonitoringEnabled = false {
+ didSet {
+ if isMonitoringEnabled {
+ monitor.startRunning()
+ } else {
+ monitor.stopRunning()
+ }
+ }
+ }
+ var isMultiTrackAudioMixingEnabled = false
+ var inputFormats: [UInt8: AVAudioFormat] {
+ return audioMixer.inputFormats
+ }
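+    /// A stream of mixed audio buffers. Accessing this property creates a new
+    /// continuation, so only the most recent consumer receives buffers.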
+ var output: AsyncStream<(AVAudioPCMBuffer, AVAudioTime)> {
+        AsyncStream<(AVAudioPCMBuffer, AVAudioTime)> { continuation in
+            self.continuation = continuation
+ }
+ }
+ private(set) var isSuspended = false
+ private lazy var audioMixer: any AudioMixer = {
+ if isMultiTrackAudioMixingEnabled {
+ var mixer = AudioMixerByMultiTrack()
+ mixer.delegate = self
+ return mixer
+ } else {
+ var mixer = AudioMixerBySingleTrack()
+ mixer.delegate = self
+ return mixer
+ }
+ }()
+ private var monitor: AudioMonitor = .init()
+
+ #if os(tvOS)
+ private var _devices: [UInt8: Any] = [:]
+ @available(tvOS 17.0, *)
+ var devices: [UInt8: AudioDeviceUnit] {
+ set {
+ _devices = newValue
+ }
+ get {
+ _devices as! [UInt8: AudioDeviceUnit]
+ }
+ }
+ #else
+ var devices: [UInt8: AudioDeviceUnit] = [:]
+ #endif
+
+ private let session: (any CaptureSessionConvertible)
+    private var continuation: AsyncStream<(AVAudioPCMBuffer, AVAudioTime)>.Continuation?
+
+ init(_ session: (some CaptureSessionConvertible), isMultiTrackAudioMixingEnabled: Bool) {
+ self.session = session
+ self.isMultiTrackAudioMixingEnabled = isMultiTrackAudioMixingEnabled
+ }
+
+ #if os(iOS) || os(macOS) || os(tvOS)
+ @available(tvOS 17.0, *)
+ func attachAudio(_ track: UInt8, device: AVCaptureDevice?, configuration: AudioDeviceConfigurationBlock?) throws {
+ try session.configuration { _ in
+ session.detachCapture(devices[track])
+ devices[track] = nil
+ if let device {
+ let capture = try AudioDeviceUnit(track, device: device)
+ capture.setSampleBufferDelegate(self)
+ try? configuration?(capture)
+ session.attachCapture(capture)
+ devices[track] = capture
+ }
+ }
+ }
+
+ @available(tvOS 17.0, *)
+ func makeDataOutput(_ track: UInt8) -> AudioDeviceUnitDataOutput {
+ return .init(track: track, audioMixer: audioMixer)
+ }
+ #endif
+
+ func append(_ track: UInt8, buffer: CMSampleBuffer) {
+ audioMixer.append(track, buffer: buffer)
+ }
+
+ func append(_ track: UInt8, buffer: AVAudioBuffer, when: AVAudioTime) {
+ switch buffer {
+ case let buffer as AVAudioPCMBuffer:
+ audioMixer.append(track, buffer: buffer, when: when)
+ default:
+ break
+ }
+ }
+
+ @available(tvOS 17.0, *)
+ func suspend() {
+ guard !isSuspended else {
+ return
+ }
+ for capture in devices.values {
+ session.detachCapture(capture)
+ }
+ isSuspended = true
+ }
+
+ @available(tvOS 17.0, *)
+ func resume() {
+ guard isSuspended else {
+ return
+ }
+ for capture in devices.values {
+ session.attachCapture(capture)
+ }
+ isSuspended = false
+ }
+
+ func finish() {
+        continuation?.finish()
+ }
+}
+
+extension AudioCaptureUnit: AudioMixerDelegate {
+ // MARK: AudioMixerDelegate
+ func audioMixer(_ audioMixer: some AudioMixer, track: UInt8, didInput buffer: AVAudioPCMBuffer, when: AVAudioTime) {
+ }
+
+ func audioMixer(_ audioMixer: some AudioMixer, errorOccurred error: AudioMixerError) {
+ }
+
+ func audioMixer(_ audioMixer: some AudioMixer, didOutput audioFormat: AVAudioFormat) {
+ monitor.inputFormat = audioFormat
+ }
+
+ func audioMixer(_ audioMixer: some AudioMixer, didOutput audioBuffer: AVAudioPCMBuffer, when: AVAudioTime) {
+ if let audioBuffer = audioBuffer.clone() {
+            continuation?.yield((audioBuffer, when))
+ }
+ monitor.append(audioBuffer, when: when)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioDeviceUnit.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioDeviceUnit.swift
new file mode 100644
index 000000000..4b1cd6fef
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioDeviceUnit.swift
@@ -0,0 +1,74 @@
+import AVFoundation
+import Foundation
+
+#if os(iOS) || os(tvOS) || os(macOS)
+/// Configuration callback block for an AudioDeviceUnit.
+@available(tvOS 17.0, *)
+public typealias AudioDeviceConfigurationBlock = @Sendable (AudioDeviceUnit) throws -> Void
+
+/// An object that provides the interface to control the AVCaptureDevice's transport behavior.
+@available(tvOS 17.0, *)
+public final class AudioDeviceUnit: DeviceUnit {
+    /// The output type for this capture audio data output.
+ public typealias Output = AVCaptureAudioDataOutput
+
+ /// The track number.
+ public let track: UInt8
+    /// The input to a capture session.
+ public private(set) var input: AVCaptureInput?
+ /// The current audio device object.
+ public private(set) var device: AVCaptureDevice?
+    /// The output that supplies sample buffers.
+ public private(set) var output: Output? {
+ didSet {
+ oldValue?.setSampleBufferDelegate(nil, queue: nil)
+ }
+ }
+ /// The connection from a capture input to a capture output.
+ public private(set) var connection: AVCaptureConnection?
+ private var dataOutput: AudioDeviceUnitDataOutput?
+
+ init(_ track: UInt8, device: AVCaptureDevice) throws {
+ input = try AVCaptureDeviceInput(device: device)
+ self.track = track
+ self.device = device
+ output = AVCaptureAudioDataOutput()
+ if let input, let output {
+ connection = AVCaptureConnection(inputPorts: input.ports, output: output)
+ }
+ }
+
+ func setSampleBufferDelegate(_ audioUnit: AudioCaptureUnit?) {
+ dataOutput = audioUnit?.makeDataOutput(track)
+ output?.setSampleBufferDelegate(dataOutput, queue: audioUnit?.lockQueue)
+ }
+}
+
+@available(tvOS 17.0, *)
+final class AudioDeviceUnitDataOutput: NSObject, AVCaptureAudioDataOutputSampleBufferDelegate {
+ private let track: UInt8
+ private let audioMixer: any AudioMixer
+
+ init(track: UInt8, audioMixer: any AudioMixer) {
+ self.track = track
+ self.audioMixer = audioMixer
+ }
+
+ func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
+ audioMixer.append(track, buffer: sampleBuffer)
+ }
+}
+
+#else
+final class AudioDeviceUnit: DeviceUnit {
+ var output: AVCaptureOutput?
+ var track: UInt8
+ var input: AVCaptureInput?
+ var device: AVCaptureDevice?
+ var connection: AVCaptureConnection?
+
+ init(_ track: UInt8, device: AVCaptureDevice) throws {
+ self.track = track
+ }
+}
+#endif
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixer.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixer.swift
new file mode 100644
index 000000000..e6a217300
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixer.swift
@@ -0,0 +1,29 @@
+@preconcurrency import AVFoundation
+
+enum AudioMixerError: Swift.Error {
+    /// The mixer failed to create the AVAudioConverter.
+    case failedToCreate(from: AVAudioFormat?, to: AVAudioFormat?)
+    /// The mixer failed to convert an audio buffer.
+    case failedToConvert(error: NSError)
+    /// The mixer is unable to provide input data.
+    case unableToProvideInputData
+    /// The mixer failed to mix the audio buffers.
+ case failedToMix(error: any Error)
+}
+
+protocol AudioMixerDelegate: AnyObject {
+ func audioMixer(_ audioMixer: some AudioMixer, track: UInt8, didInput buffer: AVAudioPCMBuffer, when: AVAudioTime)
+ func audioMixer(_ audioMixer: some AudioMixer, didOutput audioFormat: AVAudioFormat)
+ func audioMixer(_ audioMixer: some AudioMixer, didOutput audioBuffer: AVAudioPCMBuffer, when: AVAudioTime)
+ func audioMixer(_ audioMixer: some AudioMixer, errorOccurred error: AudioMixerError)
+}
+
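+/// Common interface shared by the single-track and multi-track mixer implementations.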
+protocol AudioMixer: AnyObject {
+ var delegate: (any AudioMixerDelegate)? { get set }
+ var settings: AudioMixerSettings { get set }
+ var inputFormats: [UInt8: AVAudioFormat] { get }
+ var outputFormat: AVAudioFormat? { get }
+
+ func append(_ track: UInt8, buffer: CMSampleBuffer)
+ func append(_ track: UInt8, buffer: AVAudioPCMBuffer, when: AVAudioTime)
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerByMultiTrack.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerByMultiTrack.swift
new file mode 100644
index 000000000..3d97de3e1
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerByMultiTrack.swift
@@ -0,0 +1,211 @@
+import AVFoundation
+import CoreAudio
+import Foundation
+
+final class AudioMixerByMultiTrack: AudioMixer {
+ private static let defaultSampleTime: AVAudioFramePosition = 0
+
+ weak var delegate: (any AudioMixerDelegate)?
+ var settings = AudioMixerSettings.default {
+ didSet {
+ if let inSourceFormat, settings.invalidateOutputFormat(oldValue) {
+ outputFormat = settings.makeOutputFormat(inSourceFormat)
+ }
+ for (id, trackSettings) in settings.tracks {
+ tracks[id]?.settings = trackSettings
+ try? mixerNode?.update(volume: trackSettings.volume, bus: id, scope: .input)
+ }
+ }
+ }
+ var inputFormats: [UInt8: AVAudioFormat] {
+ return tracks.compactMapValues { $0.inputFormat }
+ }
+ private(set) var outputFormat: AVAudioFormat? {
+ didSet {
+ guard let outputFormat, outputFormat != oldValue else {
+ return
+ }
+ for id in tracks.keys {
+ buffers[id] = .init(outputFormat)
+ tracks[id] = .init(id: id, outputFormat: outputFormat)
+ tracks[id]?.delegate = self
+ }
+ }
+ }
+ private var inSourceFormat: CMFormatDescription? {
+ didSet {
+ guard inSourceFormat != oldValue else {
+ return
+ }
+ outputFormat = settings.makeOutputFormat(inSourceFormat)
+ }
+ }
+    private var tracks: [UInt8: AudioMixerTrack<AudioMixerByMultiTrack>] = [:] {
+ didSet {
+ tryToSetupAudioNodes()
+ }
+ }
+ private var anchor: AVAudioTime?
+ private var buffers: [UInt8: AudioRingBuffer] = [:] {
+ didSet {
+ if logger.isEnabledFor(level: .trace) {
+ logger.trace(buffers)
+ }
+ }
+ }
+ private var mixerNode: MixerNode?
+ private var sampleTime: AVAudioFramePosition = AudioMixerByMultiTrack.defaultSampleTime
+ private var outputNode: OutputNode?
+
+    private let inputRenderCallback: AURenderCallback = { (inRefCon: UnsafeMutableRawPointer, _: UnsafeMutablePointer<AudioUnitRenderActionFlags>, _: UnsafePointer<AudioTimeStamp>, inBusNumber: UInt32, inNumberFrames: UInt32, ioData: UnsafeMutablePointer<AudioBufferList>?) in
+        let audioMixer = Unmanaged<AudioMixerByMultiTrack>.fromOpaque(inRefCon).takeUnretainedValue()
+ let status = audioMixer.render(UInt8(inBusNumber), inNumberFrames: inNumberFrames, ioData: ioData)
+ guard status == noErr else {
+ audioMixer.delegate?.audioMixer(audioMixer, errorOccurred: .unableToProvideInputData)
+ return noErr
+ }
+ return status
+ }
+
+ deinit {
+ if let mixerNode = mixerNode {
+ AudioOutputUnitStop(mixerNode.audioUnit)
+ }
+ if let outputNode = outputNode {
+ AudioOutputUnitStop(outputNode.audioUnit)
+ }
+ }
+
+ func append(_ track: UInt8, buffer: CMSampleBuffer) {
+ if settings.mainTrack == track {
+ inSourceFormat = buffer.formatDescription
+ }
+ self.track(for: track)?.append(buffer)
+ }
+
+ func append(_ track: UInt8, buffer: AVAudioPCMBuffer, when: AVAudioTime) {
+ if settings.mainTrack == track {
+ inSourceFormat = buffer.format.formatDescription
+ }
+ self.track(for: track)?.append(buffer, when: when)
+ }
+
+ private func tryToSetupAudioNodes() {
+ do {
+ try setupAudioNodes()
+ } catch {
+ logger.error(error)
+ delegate?.audioMixer(self, errorOccurred: .failedToMix(error: error))
+ }
+ }
+
+ private func setupAudioNodes() throws {
+ if let mixerNode {
+ AudioOutputUnitStop(mixerNode.audioUnit)
+ }
+ if let outputNode {
+ AudioOutputUnitStop(outputNode.audioUnit)
+ }
+ mixerNode = nil
+ outputNode = nil
+ guard let outputFormat else {
+ return
+ }
+ sampleTime = Self.defaultSampleTime
+ let mixerNode = try MixerNode(format: outputFormat)
+ try mixerNode.update(busCount: tracks.count, scope: .input)
+ let busCount = try mixerNode.busCount(scope: .input)
+        for index in 0..<busCount {
+            var callbackStruct = AURenderCallbackStruct(
+                inputProc: inputRenderCallback,
+                inputProcRefCon: Unmanaged.passUnretained(self).toOpaque())
+            try mixerNode.update(inputCallback: &callbackStruct, bus: UInt8(index))
+            try mixerNode.enable(bus: UInt8(index), scope: .input, isEnabled: index < tracks.count)
+            if let trackSettings = settings.tracks[UInt8(index)] {
+                try mixerNode.update(volume: trackSettings.volume, bus: UInt8(index), scope: .input)
+            }
+        }
+        let outputNode = try OutputNode(format: outputFormat)
+        try mixerNode.connect(to: outputNode)
+        try mixerNode.initializeAudioUnit()
+        try outputNode.initializeAudioUnit()
+        self.mixerNode = mixerNode
+        self.outputNode = outputNode
+    }
+
+    private func render(_ track: UInt8, inNumberFrames: UInt32, ioData: UnsafeMutablePointer<AudioBufferList>?) -> OSStatus {
+        guard let buffer = buffers[track] else {
+            return noErr
+        }
+        if buffer.counts == 0 {
+            guard let bufferList = UnsafeMutableAudioBufferListPointer(ioData) else {
+                return noErr
+            }
+            // No data queued on this bus yet, so output silence.
+            for i in 0..<bufferList.count {
+                memset(bufferList[i].mData, 0, Int(bufferList[i].mDataByteSize))
+            }
+            return noErr
+        }
+        return buffer.render(inNumberFrames, ioData: ioData)
+    }
+
+    private func mix(numberOfFrames: AVAudioFrameCount) {
+        guard let outputNode else {
+            return
+        }
+        do {
+            let buffer = try outputNode.render(numberOfFrames: numberOfFrames, sampleTime: sampleTime)
+            delegate?.audioMixer(self, didOutput: buffer.muted(settings.isMuted), when: .init(sampleTime: sampleTime, atRate: outputNode.format.sampleRate))
+            sampleTime += AVAudioFramePosition(numberOfFrames)
+        } catch {
+            delegate?.audioMixer(self, errorOccurred: .failedToMix(error: error))
+        }
+    }
+
+    private func track(for id: UInt8) -> AudioMixerTrack<AudioMixerByMultiTrack>? {
+ if let track = tracks[id] {
+ return track
+ }
+ guard let outputFormat else {
+ return nil
+ }
+        let track = AudioMixerTrack<AudioMixerByMultiTrack>(id: id, outputFormat: outputFormat)
+ track.delegate = self
+ if let trackSettings = settings.tracks[id] {
+ track.settings = trackSettings
+ }
+ tracks[id] = track
+ buffers[id] = .init(outputFormat)
+ return track
+ }
+}
+
+extension AudioMixerByMultiTrack: AudioMixerTrackDelegate {
+ // MARK: AudioMixerTrackDelegate
+    func track(_ track: AudioMixerTrack<AudioMixerByMultiTrack>, didOutput audioPCMBuffer: AVAudioPCMBuffer, when: AVAudioTime) {
+ delegate?.audioMixer(self, track: track.id, didInput: audioPCMBuffer, when: when)
+ buffers[track.id]?.append(audioPCMBuffer, when: when)
+ if settings.mainTrack == track.id {
+ if sampleTime == Self.defaultSampleTime {
+ sampleTime = when.sampleTime
+ anchor = when
+ }
+ mix(numberOfFrames: audioPCMBuffer.frameLength)
+ }
+ }
+
+    func track(_ track: AudioMixerTrack<AudioMixerByMultiTrack>, errorOccurred error: AudioMixerError) {
+ delegate?.audioMixer(self, errorOccurred: error)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerBySingleTrack.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerBySingleTrack.swift
new file mode 100644
index 000000000..df00f7714
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerBySingleTrack.swift
@@ -0,0 +1,67 @@
+import AVFoundation
+import Foundation
+
+final class AudioMixerBySingleTrack: AudioMixer {
+ weak var delegate: (any AudioMixerDelegate)?
+ var settings = AudioMixerSettings.default {
+ didSet {
+ if let trackSettings = settings.tracks[settings.mainTrack] {
+ track?.settings = trackSettings
+ }
+ }
+ }
+ var inputFormats: [UInt8: AVAudioFormat] {
+ var formats: [UInt8: AVAudioFormat] = .init()
+ if let track = track, let inputFormat = track.inputFormat {
+ formats[track.id] = inputFormat
+ }
+ return formats
+ }
+ private(set) var outputFormat: AVAudioFormat? {
+ didSet {
+ guard let outputFormat, outputFormat != oldValue else {
+ return
+ }
+            let track = AudioMixerTrack<AudioMixerBySingleTrack>(id: settings.mainTrack, outputFormat: outputFormat)
+ track.delegate = self
+ self.track = track
+ }
+ }
+ private var inSourceFormat: CMFormatDescription? {
+ didSet {
+ guard inSourceFormat != oldValue else {
+ return
+ }
+ outputFormat = settings.makeOutputFormat(inSourceFormat)
+ }
+ }
+    private var track: AudioMixerTrack<AudioMixerBySingleTrack>?
+
+ func append(_ track: UInt8, buffer: CMSampleBuffer) {
+ guard settings.mainTrack == track else {
+ return
+ }
+ inSourceFormat = buffer.formatDescription
+ self.track?.append(buffer)
+ }
+
+ func append(_ track: UInt8, buffer: AVAudioPCMBuffer, when: AVAudioTime) {
+ guard settings.mainTrack == track else {
+ return
+ }
+ inSourceFormat = buffer.format.formatDescription
+ self.track?.append(buffer, when: when)
+ }
+}
+
+extension AudioMixerBySingleTrack: AudioMixerTrackDelegate {
+ // MARK: AudioMixerTrackDelegate
+    func track(_ track: AudioMixerTrack<AudioMixerBySingleTrack>, didOutput buffer: AVAudioPCMBuffer, when: AVAudioTime) {
+ delegate?.audioMixer(self, track: track.id, didInput: buffer, when: when)
+ delegate?.audioMixer(self, didOutput: buffer.muted(settings.isMuted), when: when)
+ }
+
+    func track(_ track: AudioMixerTrack<AudioMixerBySingleTrack>, errorOccurred error: AudioMixerError) {
+ delegate?.audioMixer(self, errorOccurred: error)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerSettings.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerSettings.swift
new file mode 100644
index 000000000..5887ccd82
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerSettings.swift
@@ -0,0 +1,78 @@
+import AVFoundation
+import Foundation
+
+/// Constraints on the audio mixer settings.
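+/// ## Example code:
+/// ```swift
+/// // A minimal sketch with hypothetical values: force a 48 kHz stereo
+/// // output and mute track 1.
+/// var settings = AudioMixerSettings(sampleRate: 48_000, channels: 2)
+/// settings.tracks[1] = AudioMixerTrackSettings(volume: 0.0, isMuted: true)
+/// ```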
+public struct AudioMixerSettings: Codable, Sendable {
+ /// The default value.
+ public static let `default` = AudioMixerSettings()
+    /// The maximum sample rate supported by the system.
+ public static let maximumSampleRate: Float64 = 48000.0
+
+ #if os(macOS)
+ static let commonFormat: AVAudioCommonFormat = .pcmFormatFloat32
+ #else
+ static let commonFormat: AVAudioCommonFormat = .pcmFormatInt16
+ #endif
+
+ /// Specifies the sampleRate of audio output. A value of 0 will be the same as the main track source.
+ public let sampleRate: Float64
+
+ /// Specifies the channels of audio output. A value of 0 will be the same as the main track source.
+ /// - Warning: If you are using IOStreamRecorder, please set it to 1 or 2. Otherwise, the audio will not be saved in local recordings.
+ public let channels: UInt32
+
+    /// Specifies whether the audio output is muted.
+ public var isMuted: Bool
+
+ /// Specifies the main track number.
+ public var mainTrack: UInt8
+
+ /// Specifies the track settings.
+ public var tracks: [UInt8: AudioMixerTrackSettings]
+
+    /// Specifies the maximum number of channels supported by the system.
+    /// - Description: The maximum number of channels used when `channels` is 0 (not set). More than 2 channels are not supported by streaming services, and local recording also supports at most 2 channels, so this limit prevents audio issues.
+ public var maximumNumberOfChannels: UInt32 = 2
+
+ /// Creates a new instance of a settings.
+ public init(
+ sampleRate: Float64 = 0,
+ channels: UInt32 = 0,
+ isMuted: Bool = false,
+ mainTrack: UInt8 = 0,
+ tracks: [UInt8: AudioMixerTrackSettings] = .init()
+ ) {
+ self.sampleRate = sampleRate
+ self.channels = channels
+ self.isMuted = isMuted
+ self.mainTrack = mainTrack
+ self.tracks = tracks
+ }
+
+ func invalidateOutputFormat(_ oldValue: Self) -> Bool {
+ return !(sampleRate == oldValue.sampleRate &&
+ channels == oldValue.channels)
+ }
+
+ func makeOutputFormat(_ formatDescription: CMFormatDescription?) -> AVAudioFormat? {
+ guard let format = AVAudioUtil.makeAudioFormat(formatDescription) else {
+ return nil
+ }
+ let sampleRate = min(sampleRate == 0 ? format.sampleRate : sampleRate, Self.maximumSampleRate)
+ let channelCount = channels == 0 ? min(format.channelCount, maximumNumberOfChannels) : channels
+ if let channelLayout = AVAudioUtil.makeChannelLayout(channelCount) {
+ return .init(
+ commonFormat: Self.commonFormat,
+ sampleRate: sampleRate,
+ interleaved: format.isInterleaved,
+ channelLayout: channelLayout
+ )
+ }
+ return .init(
+ commonFormat: Self.commonFormat,
+ sampleRate: sampleRate,
+ channels: min(channelCount, 2),
+ interleaved: format.isInterleaved
+ )
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerTrack.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerTrack.swift
new file mode 100644
index 000000000..b2fafa8b8
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerTrack.swift
@@ -0,0 +1,127 @@
+import Accelerate
+import AVFoundation
+
+private let kAudioMixerTrack_frameCapacity: AVAudioFrameCount = 1024
+
+protocol AudioMixerTrackDelegate: AnyObject {
+    func track(_ track: AudioMixerTrack<Self>, didOutput audioPCMBuffer: AVAudioPCMBuffer, when: AVAudioTime)
+    func track(_ track: AudioMixerTrack<Self>, errorOccurred error: AudioMixerError)
+}
+
+final class AudioMixerTrack<T: AudioMixerTrackDelegate> {
+ let id: UInt8
+ let outputFormat: AVAudioFormat
+ weak var delegate: T?
+ var settings: AudioMixerTrackSettings = .init() {
+ didSet {
+ settings.apply(audioConverter, oldValue: oldValue)
+ }
+ }
+ var inputFormat: AVAudioFormat? {
+ return audioConverter?.inputFormat
+ }
+ private var inSourceFormat: CMFormatDescription? {
+ didSet {
+ guard inSourceFormat != oldValue else {
+ return
+ }
+ setUp(inSourceFormat)
+ }
+ }
+ private var audioTime = AudioTime()
+ private var ringBuffer: AudioRingBuffer?
+ private var inputBuffer: AVAudioPCMBuffer?
+ private var outputBuffer: AVAudioPCMBuffer?
+ private var audioConverter: AVAudioConverter? {
+ didSet {
+ guard let audioConverter else {
+ return
+ }
+ audioConverter.downmix = settings.downmix
+ if let channelMap = settings.validatedChannelMap(audioConverter) {
+ audioConverter.channelMap = channelMap.map { NSNumber(value: $0) }
+ } else {
+ switch audioConverter.outputFormat.channelCount {
+ case 1:
+ audioConverter.channelMap = [0]
+ case 2:
+ audioConverter.channelMap = (audioConverter.inputFormat.channelCount == 1) ? [0, 0] : [0, 1]
+ default:
+ break
+ }
+ }
+ audioConverter.primeMethod = .normal
+ }
+ }
+
+ init(id: UInt8, outputFormat: AVAudioFormat) {
+ self.id = id
+ self.outputFormat = outputFormat
+ }
+
+ func append(_ sampleBuffer: CMSampleBuffer) {
+ inSourceFormat = sampleBuffer.formatDescription
+ if !audioTime.hasAnchor {
+ audioTime.anchor(sampleBuffer.presentationTimeStamp, sampleRate: outputFormat.sampleRate)
+ }
+ ringBuffer?.append(sampleBuffer)
+ resample()
+ }
+
+ func append(_ audioBuffer: AVAudioPCMBuffer, when: AVAudioTime) {
+ inSourceFormat = audioBuffer.format.formatDescription
+ if !audioTime.hasAnchor {
+ audioTime.anchor(when)
+ }
+ ringBuffer?.append(audioBuffer, when: when)
+ resample()
+ }
+
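+    /// Pulls converted frames through the AVAudioConverter until it reports that
+    /// no more input is queued, emitting one output buffer per successful pass.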
+ @inline(__always)
+ private func resample() {
+ guard let outputBuffer, let inputBuffer, let ringBuffer else {
+ return
+ }
+ var status: AVAudioConverterOutputStatus? = .endOfStream
+ repeat {
+ var error: NSError?
+ status = audioConverter?.convert(to: outputBuffer, error: &error) { inNumberFrames, status in
+ if inNumberFrames <= ringBuffer.counts {
+ _ = ringBuffer.render(inNumberFrames, ioData: inputBuffer.mutableAudioBufferList)
+ inputBuffer.frameLength = inNumberFrames
+ status.pointee = .haveData
+ return inputBuffer
+ } else {
+ status.pointee = .noDataNow
+ return nil
+ }
+ }
+ switch status {
+ case .haveData:
+ delegate?.track(self, didOutput: outputBuffer.muted(settings.isMuted), when: audioTime.at)
+ audioTime.advanced(1024)
+ case .error:
+ if let error {
+ delegate?.track(self, errorOccurred: .failedToConvert(error: error))
+ }
+ default:
+ break
+ }
+        } while status == .haveData
+ }
+
+ private func setUp(_ inSourceFormat: CMFormatDescription?) {
+ guard let inputFormat = AVAudioUtil.makeAudioFormat(inSourceFormat) else {
+ delegate?.track(self, errorOccurred: .failedToCreate(from: inputFormat, to: outputFormat))
+ return
+ }
+ ringBuffer = .init(inputFormat)
+ inputBuffer = .init(pcmFormat: inputFormat, frameCapacity: kAudioMixerTrack_frameCapacity * 4)
+ outputBuffer = .init(pcmFormat: outputFormat, frameCapacity: kAudioMixerTrack_frameCapacity)
+ if logger.isEnabledFor(level: .info) {
+ logger.info("inputFormat:", inputFormat, ", outputFormat:", outputFormat)
+ }
+ audioTime.reset()
+ audioConverter = .init(from: inputFormat, to: outputFormat)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerTrackSettings.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerTrackSettings.swift
new file mode 100644
index 000000000..05cf47713
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerTrackSettings.swift
@@ -0,0 +1,56 @@
+import AVFoundation
+
+/// Constraints on the audio mixer track's settings.
+public struct AudioMixerTrackSettings: Codable, Sendable {
+ /// The default value.
+ public static let `default` = AudioMixerTrackSettings()
+
+ /// Specifies the volume for output.
+ public var volume: Float
+
+    /// Specifies whether the audio output is muted.
+ public var isMuted = false
+
+    /// Specifies whether to downmix the channels. Currently, input sources with 4, 5, 6, and 8 channels are supported.
+ public var downmix = true
+
+ /// Specifies the map of the output to input channels.
+ /// ## Example code:
+ /// ```swift
+ /// // If you want to use the 3rd and 4th channels from a 4-channel input source for a 2-channel output, you would specify it like this.
+ /// channelMap = [2, 3]
+ /// ```
+ public var channelMap: [Int]?
+
+ /// Creates a new instance.
+ public init(volume: Float = 1.0, isMuted: Bool = false, downmix: Bool = true, channelMap: [Int]? = nil) {
+ self.volume = volume
+ self.isMuted = isMuted
+ self.downmix = downmix
+ self.channelMap = channelMap
+ }
+
+ func apply(_ converter: AVAudioConverter?, oldValue: AudioMixerTrackSettings) {
+ guard let converter else {
+ return
+ }
+ if downmix != oldValue.downmix {
+ converter.downmix = downmix
+ }
+ if channelMap != oldValue.channelMap {
+ if let channelMap = validatedChannelMap(converter) {
+ converter.channelMap = channelMap.map { NSNumber(value: $0) }
+ }
+ }
+ }
+
+ func validatedChannelMap(_ converter: AVAudioConverter) -> [Int]? {
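+        // A map is valid only when it has one entry per output channel and
+        // every entry references an existing input channel.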
+        guard let channelMap, channelMap.count == Int(converter.outputFormat.channelCount) else {
+            return nil
+        }
+        for inputChannel in channelMap where Int(converter.inputFormat.channelCount) <= inputChannel {
+ return nil
+ }
+ return channelMap
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMonitor.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMonitor.swift
new file mode 100644
index 000000000..ff24853a9
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMonitor.swift
@@ -0,0 +1,116 @@
+import AudioUnit
+import AVFoundation
+import CoreAudio
+import CoreMedia
+import Foundation
+
+final class AudioMonitor {
+ var inputFormat: AVAudioFormat? {
+ didSet {
+ if let inputFormat {
+ ringBuffer = .init(inputFormat)
+ if isRunning {
+ audioUnit = makeAudioUnit()
+ }
+ } else {
+ ringBuffer = nil
+ }
+ }
+ }
+ private(set) var isRunning = false
+ private var audioUnit: AudioUnit? {
+ didSet {
+ if let oldValue {
+ AudioOutputUnitStop(oldValue)
+ AudioUnitUninitialize(oldValue)
+ AudioComponentInstanceDispose(oldValue)
+ }
+ if let audioUnit {
+ AudioOutputUnitStart(audioUnit)
+ }
+ }
+ }
+ private var ringBuffer: AudioRingBuffer?
+
+    private let callback: AURenderCallback = { (inRefCon: UnsafeMutableRawPointer, _: UnsafeMutablePointer<AudioUnitRenderActionFlags>, _: UnsafePointer<AudioTimeStamp>, _: UInt32, inNumberFrames: UInt32, ioData: UnsafeMutablePointer<AudioBufferList>?) in
+        let monitor = Unmanaged<AudioMonitor>.fromOpaque(inRefCon).takeUnretainedValue()
+ return monitor.render(inNumberFrames, ioData: ioData)
+ }
+
+ deinit {
+ stopRunning()
+ }
+
+ func append(_ audioPCMBuffer: AVAudioPCMBuffer, when: AVAudioTime) {
+ guard isRunning else {
+ return
+ }
+ ringBuffer?.append(audioPCMBuffer, when: when)
+ }
+
+    private func render(_ inNumberFrames: UInt32, ioData: UnsafeMutablePointer<AudioBufferList>?) -> OSStatus {
+ guard let ringBuffer else {
+ return noErr
+ }
+ if ringBuffer.counts == 0 {
+ guard let bufferList = UnsafeMutableAudioBufferListPointer(ioData) else {
+ return noErr
+ }
+            for i in 0..<bufferList.count {
+                memset(bufferList[i].mData, 0, Int(bufferList[i].mDataByteSize))
+            }
+            return noErr
+        }
+        return ringBuffer.render(inNumberFrames, ioData: ioData)
+    }
+
+    private func makeAudioUnit() -> AudioUnit? {
+ guard let inputFormat else {
+ return nil
+ }
+ var inSourceFormat = inputFormat.formatDescription.audioStreamBasicDescription
+ var audioUnit: AudioUnit?
+ #if os(macOS)
+ let subType = kAudioUnitSubType_DefaultOutput
+ #else
+ let subType = kAudioUnitSubType_RemoteIO
+ #endif
+ var audioComponentDescription = AudioComponentDescription(
+ componentType: kAudioUnitType_Output,
+ componentSubType: subType,
+ componentManufacturer: kAudioUnitManufacturer_Apple,
+ componentFlags: 0,
+ componentFlagsMask: 0)
+ let audioComponent = AudioComponentFindNext(nil, &audioComponentDescription)
+ if let audioComponent {
+ AudioComponentInstanceNew(audioComponent, &audioUnit)
+ }
+ if let audioUnit {
+ AudioUnitInitialize(audioUnit)
+ let ref = UnsafeMutableRawPointer(Unmanaged.passUnretained(self).toOpaque())
+ var callbackstruct = AURenderCallbackStruct(inputProc: callback, inputProcRefCon: ref)
+ AudioUnitSetProperty(audioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &callbackstruct, UInt32(MemoryLayout.size(ofValue: callbackstruct)))
+ AudioUnitSetProperty(audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &inSourceFormat, UInt32(MemoryLayout.size(ofValue: inSourceFormat)))
+ }
+ return audioUnit
+ }
+}
+
+extension AudioMonitor: Runner {
+ // MARK: Running
+ func startRunning() {
+ guard !isRunning else {
+ return
+ }
+ audioUnit = makeAudioUnit()
+ isRunning = true
+ }
+
+ func stopRunning() {
+ guard isRunning else {
+ return
+ }
+ audioUnit = nil
+ isRunning = false
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioNode.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioNode.swift
new file mode 100644
index 000000000..5c911f524
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioNode.swift
@@ -0,0 +1,271 @@
+import AVFoundation
+
+class AudioNode {
+ enum Error: Swift.Error {
+ case unableToFindAudioComponent
+ case unableToCreateAudioUnit(_ status: OSStatus)
+ case unableToInitializeAudioUnit(_ status: OSStatus)
+ case unableToUpdateBus(_ status: OSStatus)
+ case unableToRetrieveValue(_ status: OSStatus)
+ case unableToConnectToNode(_ status: OSStatus)
+ }
+
+ enum BusScope: String, CaseIterable {
+ case input
+ case output
+
+ var audioUnitScope: AudioUnitScope {
+ switch self {
+ case .input:
+ return kAudioUnitScope_Input
+ case .output:
+ return kAudioUnitScope_Output
+ }
+ }
+ }
+
+ let audioUnit: AudioUnit
+
+ init(description: inout AudioComponentDescription) throws {
+ guard let audioComponent = AudioComponentFindNext(nil, &description) else {
+ throw Error.unableToFindAudioComponent
+ }
+ var audioUnit: AudioUnit?
+ let status = AudioComponentInstanceNew(audioComponent, &audioUnit)
+ guard status == noErr, let audioUnit else {
+ throw Error.unableToCreateAudioUnit(status)
+ }
+ self.audioUnit = audioUnit
+ }
+
+ deinit {
+ AudioOutputUnitStop(audioUnit)
+ AudioUnitUninitialize(audioUnit)
+ AudioComponentInstanceDispose(audioUnit)
+ }
+
+ func initializeAudioUnit() throws {
+ let status = AudioUnitInitialize(audioUnit)
+ guard status == noErr else {
+ throw Error.unableToInitializeAudioUnit(status)
+ }
+ }
+
+ @discardableResult
+ func connect(to node: AudioNode, sourceBus: Int = 0, destBus: Int = 0) throws -> AudioUnitConnection {
+ var connection = AudioUnitConnection(sourceAudioUnit: audioUnit,
+ sourceOutputNumber: UInt32(sourceBus),
+ destInputNumber: UInt32(destBus))
+ let status = AudioUnitSetProperty(node.audioUnit,
+ kAudioUnitProperty_MakeConnection,
+ kAudioUnitScope_Input,
+ 0,
+ &connection,
+                                          UInt32(MemoryLayout<AudioUnitConnection>.size))
+ guard status == noErr else {
+ throw Error.unableToConnectToNode(status)
+ }
+ return connection
+ }
+
+ func update(format: AVAudioFormat, bus: UInt8, scope: BusScope) throws {
+ var asbd = format.streamDescription.pointee
+ let status = AudioUnitSetProperty(audioUnit,
+ kAudioUnitProperty_StreamFormat,
+ scope.audioUnitScope,
+ UInt32(bus),
+ &asbd,
+                                          UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
+ guard status == noErr else {
+ throw Error.unableToUpdateBus(status)
+ }
+ }
+
+ func format(bus: UInt8, scope: BusScope) throws -> AudioStreamBasicDescription {
+ var asbd = AudioStreamBasicDescription()
+        var propertySize = UInt32(MemoryLayout<AudioStreamBasicDescription>.size)
+ let status = AudioUnitGetProperty(audioUnit,
+ kAudioUnitProperty_StreamFormat,
+ scope.audioUnitScope,
+ UInt32(bus),
+ &asbd,
+ &propertySize)
+ guard status == noErr else {
+ throw Error.unableToRetrieveValue(status)
+ }
+ return asbd
+ }
+
+ /// Apple bug: Cannot set to less than 8 buses
+ func update(busCount: Int, scope: BusScope) throws {
+ var busCount = UInt32(busCount)
+ let status = AudioUnitSetProperty(audioUnit,
+ kAudioUnitProperty_ElementCount,
+ scope.audioUnitScope,
+ 0,
+ &busCount,
+                                          UInt32(MemoryLayout<UInt32>.size))
+ guard status == noErr else {
+ throw Error.unableToUpdateBus(status)
+ }
+ }
+
+ func busCount(scope: BusScope) throws -> Int {
+ var busCount: UInt32 = 0
+        var propertySize = UInt32(MemoryLayout<UInt32>.size)
+ let status = AudioUnitGetProperty(audioUnit,
+ kAudioUnitProperty_ElementCount,
+ scope.audioUnitScope,
+ 0,
+ &busCount,
+ &propertySize)
+ guard status == noErr else {
+ throw Error.unableToUpdateBus(status)
+ }
+ return Int(busCount)
+ }
+}
+
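+/// ## Example code:
+/// ```swift
+/// // A minimal sketch, assuming `format` is an AVAudioFormat: wire the
+/// // multi-channel mixer into a generic output and pull 1024 frames.
+/// let mixerNode = try MixerNode(format: format)
+/// let outputNode = try OutputNode(format: format)
+/// try mixerNode.connect(to: outputNode)
+/// try mixerNode.initializeAudioUnit()
+/// try outputNode.initializeAudioUnit()
+/// let rendered = try outputNode.render(numberOfFrames: 1024, sampleTime: 0)
+/// ```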
+final class MixerNode: AudioNode {
+ private var mixerComponentDescription = AudioComponentDescription(
+ componentType: kAudioUnitType_Mixer,
+ componentSubType: kAudioUnitSubType_MultiChannelMixer,
+ componentManufacturer: kAudioUnitManufacturer_Apple,
+ componentFlags: 0,
+ componentFlagsMask: 0)
+
+ init(format: AVAudioFormat) throws {
+ var mixerDefaultDesc = AudioComponentDescription(
+ componentType: kAudioUnitType_Mixer,
+ componentSubType: kAudioUnitSubType_MultiChannelMixer,
+ componentManufacturer: kAudioUnitManufacturer_Apple,
+ componentFlags: 0,
+ componentFlagsMask: 0)
+
+ try super.init(description: &mixerDefaultDesc)
+
+ self.mixerComponentDescription = mixerDefaultDesc
+ }
+
+ func update(inputCallback: inout AURenderCallbackStruct, bus: UInt8) throws {
+ let status = AudioUnitSetProperty(audioUnit,
+ kAudioUnitProperty_SetRenderCallback,
+ kAudioUnitScope_Input,
+ UInt32(bus),
+ &inputCallback,
+                                          UInt32(MemoryLayout<AURenderCallbackStruct>.size))
+ guard status == noErr else {
+ throw Error.unableToUpdateBus(status)
+ }
+ }
+
+ func enable(bus: UInt8, scope: AudioNode.BusScope, isEnabled: Bool) throws {
+ let value: AudioUnitParameterValue = isEnabled ? 1 : 0
+ let status = AudioUnitSetParameter(audioUnit,
+ kMultiChannelMixerParam_Enable,
+ scope.audioUnitScope,
+ UInt32(bus),
+ value,
+ 0)
+ guard status == noErr else {
+ throw Error.unableToUpdateBus(status)
+ }
+ }
+
+ func isEnabled(bus: UInt8, scope: AudioNode.BusScope) throws -> Bool {
+ var value: AudioUnitParameterValue = 0
+ let status = AudioUnitGetParameter(audioUnit,
+ kMultiChannelMixerParam_Enable,
+ scope.audioUnitScope,
+ UInt32(bus),
+ &value)
+ guard status == noErr else {
+ throw Error.unableToRetrieveValue(status)
+ }
+ return value != 0
+ }
+
+ func update(volume: Float, bus: UInt8, scope: AudioNode.BusScope) throws {
+ let value: AudioUnitParameterValue = max(0, min(1, volume))
+ let status = AudioUnitSetParameter(audioUnit,
+ kMultiChannelMixerParam_Volume,
+ scope.audioUnitScope,
+ UInt32(bus),
+ value,
+ 0)
+ guard status == noErr else {
+ throw Error.unableToUpdateBus(status)
+ }
+ }
+
+ func volume(bus: UInt8, of scope: AudioNode.BusScope) throws -> Float {
+ var value: AudioUnitParameterValue = 0
+ let status = AudioUnitGetParameter(audioUnit,
+ kMultiChannelMixerParam_Volume,
+ scope.audioUnitScope,
+ UInt32(bus),
+ &value)
+ guard status == noErr else {
+ throw Error.unableToUpdateBus(status)
+ }
+ return value
+ }
+}
+
+final class OutputNode: AudioNode {
+ enum Error: Swift.Error {
+ case unableToRenderFrames
+ case unableToAllocateBuffer
+ }
+
+ private var outputComponentDescription = AudioComponentDescription(
+ componentType: kAudioUnitType_Output,
+ componentSubType: kAudioUnitSubType_GenericOutput,
+ componentManufacturer: kAudioUnitManufacturer_Apple,
+ componentFlags: 0,
+ componentFlagsMask: 0)
+
+ var format: AVAudioFormat {
+ buffer.format
+ }
+ private let buffer: AVAudioPCMBuffer
+ private var timeStamp: AudioTimeStamp = {
+ var timestamp = AudioTimeStamp()
+ timestamp.mFlags = .sampleTimeValid
+ return timestamp
+ }()
+
+ init(format: AVAudioFormat) throws {
+ guard let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: 1024) else {
+ throw Error.unableToAllocateBuffer
+ }
+ self.buffer = buffer
+
+ var outputDefaultDesc = AudioComponentDescription(
+ componentType: kAudioUnitType_Output,
+ componentSubType: kAudioUnitSubType_GenericOutput,
+ componentManufacturer: kAudioUnitManufacturer_Apple,
+ componentFlags: 0,
+ componentFlagsMask: 0)
+
+ try super.init(description: &outputDefaultDesc)
+
+ self.outputComponentDescription = outputDefaultDesc
+ }
+
+ func render(numberOfFrames: AVAudioFrameCount,
+ sampleTime: AVAudioFramePosition) throws -> AVAudioPCMBuffer {
+ timeStamp.mSampleTime = Float64(sampleTime)
+ buffer.frameLength = numberOfFrames
+ let status = AudioUnitRender(audioUnit,
+ nil,
+ &timeStamp,
+ 0,
+ numberOfFrames,
+ buffer.mutableAudioBufferList)
+ guard status == noErr else {
+ throw Error.unableToRenderFrames
+ }
+ return buffer
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioRingBuffer.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioRingBuffer.swift
new file mode 100644
index 000000000..030617050
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioRingBuffer.swift
@@ -0,0 +1,229 @@
+import Accelerate
+import AVFoundation
+import CoreAudio
+import CoreMedia
+import Foundation
+
+final class AudioRingBuffer {
+ private static let bufferCounts: UInt32 = 16
+ private static let numSamples: UInt32 = 1024
+
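+    /// The number of frames available for rendering, including silent frames
+    /// inserted to bridge timestamp gaps.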
+ var counts: Int {
+ if tail <= head {
+ return head - tail + skip
+ }
+ return Int(outputBuffer.frameLength) - tail + head + skip
+ }
+
+ private var head = 0
+ private var tail = 0
+ private var skip = 0
+ private var sampleTime: AVAudioFramePosition = 0
+ private var inputFormat: AVAudioFormat
+ private var inputBuffer: AVAudioPCMBuffer
+ private var outputBuffer: AVAudioPCMBuffer
+
+ init?(_ inputFormat: AVAudioFormat, bufferCounts: UInt32 = AudioRingBuffer.bufferCounts) {
+ guard
+ let inputBuffer = AVAudioPCMBuffer(pcmFormat: inputFormat, frameCapacity: Self.numSamples) else {
+ return nil
+ }
+ guard let outputBuffer = AVAudioPCMBuffer(pcmFormat: inputFormat, frameCapacity: Self.numSamples * bufferCounts) else {
+ return nil
+ }
+ self.inputFormat = inputFormat
+ self.inputBuffer = inputBuffer
+ self.outputBuffer = outputBuffer
+ self.outputBuffer.frameLength = self.outputBuffer.frameCapacity
+ }
+
+ func isDataAvailable(_ inNumberFrames: UInt32) -> Bool {
+ return inNumberFrames <= counts
+ }
+
+ func append(_ sampleBuffer: CMSampleBuffer) {
+ guard CMSampleBufferDataIsReady(sampleBuffer) else {
+ return
+ }
+ let targetSampleTime: CMTimeValue
+ if sampleBuffer.presentationTimeStamp.timescale == Int32(inputBuffer.format.sampleRate) {
+ targetSampleTime = sampleBuffer.presentationTimeStamp.value
+ } else {
+ targetSampleTime = Int64(Double(sampleBuffer.presentationTimeStamp.value) * inputBuffer.format.sampleRate / Double(sampleBuffer.presentationTimeStamp.timescale))
+ }
+ if sampleTime == 0 {
+ sampleTime = targetSampleTime
+ }
+ if outputBuffer.frameLength < sampleBuffer.numSamples {
+ skip += sampleBuffer.numSamples
+ return
+ }
+ if inputBuffer.frameLength < sampleBuffer.numSamples {
+ if let buffer = AVAudioPCMBuffer(pcmFormat: inputFormat, frameCapacity: AVAudioFrameCount(sampleBuffer.numSamples)) {
+ self.inputBuffer = buffer
+ }
+ }
+ inputBuffer.frameLength = AVAudioFrameCount(sampleBuffer.numSamples)
+ let status = CMSampleBufferCopyPCMDataIntoAudioBufferList(
+ sampleBuffer,
+ at: 0,
+ frameCount: Int32(sampleBuffer.numSamples),
+ into: inputBuffer.mutableAudioBufferList
+ )
+ if status == noErr && kLinearPCMFormatFlagIsBigEndian == ((sampleBuffer.formatDescription?.audioStreamBasicDescription?.mFormatFlags ?? 0) & kLinearPCMFormatFlagIsBigEndian) {
+ if inputFormat.isInterleaved {
+ switch inputFormat.commonFormat {
+ case .pcmFormatInt16:
+ let length = sampleBuffer.dataBuffer?.dataLength ?? 0
+ var image = vImage_Buffer(data: inputBuffer.mutableAudioBufferList[0].mBuffers.mData, height: 1, width: vImagePixelCount(length / 2), rowBytes: length)
+ vImageByteSwap_Planar16U(&image, &image, vImage_Flags(kvImageNoFlags))
+ default:
+ break
+ }
+ }
+ }
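+        // Compute how many silent frames are needed to bridge the gap between
+        // the expected and the actual presentation timestamps.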
+ skip = max(Int(targetSampleTime - sampleTime), 0)
+ sampleTime += Int64(skip)
+ append(inputBuffer)
+ }
+
+ func append(_ audioPCMBuffer: AVAudioPCMBuffer, when: AVAudioTime) {
+ if sampleTime == 0 {
+ sampleTime = when.sampleTime
+ }
+ if inputBuffer.frameLength < audioPCMBuffer.frameLength {
+ if let buffer = AVAudioPCMBuffer(pcmFormat: inputFormat, frameCapacity: audioPCMBuffer.frameCapacity) {
+ self.inputBuffer = buffer
+ }
+ }
+ inputBuffer.frameLength = audioPCMBuffer.frameLength
+ _ = inputBuffer.copy(audioPCMBuffer)
+ skip = Int(max(when.sampleTime - sampleTime, 0))
+ sampleTime += Int64(skip)
+ append(inputBuffer)
+ }
+
+    func render(_ inNumberFrames: UInt32, ioData: UnsafeMutablePointer<AudioBufferList>?, offset: Int = 0) -> OSStatus {
+ if 0 < skip {
+ let numSamples = min(Int(inNumberFrames), skip)
+ guard let bufferList = UnsafeMutableAudioBufferListPointer(ioData) else {
+ return -1
+ }
+ if inputFormat.isInterleaved {
+ let channelCount = Int(inputFormat.channelCount)
+ switch inputFormat.commonFormat {
+ case .pcmFormatInt16:
+ bufferList[0].mData?.assumingMemoryBound(to: Int16.self).advanced(by: offset * channelCount).update(repeating: 0, count: numSamples)
+ case .pcmFormatInt32:
+ bufferList[0].mData?.assumingMemoryBound(to: Int32.self).advanced(by: offset * channelCount).update(repeating: 0, count: numSamples)
+ case .pcmFormatFloat32:
+ bufferList[0].mData?.assumingMemoryBound(to: Float32.self).advanced(by: offset * channelCount).update(repeating: 0, count: numSamples)
+ default:
+ break
+ }
+ } else {
+                for i in 0..<bufferList.count {
+                    bufferList[i].mData?.assumingMemoryBound(to: Float32.self).advanced(by: offset).update(repeating: 0, count: numSamples)
+                }
+            }
+            skip -= numSamples
+            if 0 < Int(inNumberFrames) - numSamples {
+                return render(inNumberFrames - UInt32(numSamples), ioData: ioData, offset: numSamples)
+            }
+            return noErr
+        }
+        return noErr
+    }
+}
+
+extension CaptureSession {
+    #if os(macOS)
+    struct Capabilities {
+    static let isMultiCamSupported = false
+
+    var isMultiCamSessionEnabled = false
+
+    func synchronizationClock(_ session: AVCaptureSession) -> CMClock? {
+ if #available(macOS 12.3, *) {
+ return session.synchronizationClock
+ } else {
+ return session.masterClock
+ }
+ }
+
+ func makeSession(_ sessionPreset: AVCaptureSession.Preset) -> AVCaptureSession {
+ let session = AVCaptureSession()
+ if session.canSetSessionPreset(sessionPreset) {
+ session.sessionPreset = sessionPreset
+ }
+ return session
+ }
+
+ func isMultitaskingCameraAccessEnabled(_ session: AVCaptureSession) -> Bool {
+ false
+ }
+ }
+ #elseif os(iOS) || os(tvOS)
+ struct Capabilities {
+ static var isMultiCamSupported: Bool {
+ if #available(tvOS 17.0, *) {
+ return AVCaptureMultiCamSession.isMultiCamSupported
+ } else {
+ return false
+ }
+ }
+
+ var isMultiCamSessionEnabled = false {
+ didSet {
+ if !Self.isMultiCamSupported {
+ isMultiCamSessionEnabled = false
+ logger.info("This device can't support the AVCaptureMultiCamSession.")
+ }
+ }
+ }
+
+ #if os(iOS)
+ func synchronizationClock(_ session: AVCaptureSession) -> CMClock? {
+ if #available(iOS 15.4, *) {
+ return session.synchronizationClock
+ } else {
+ return session.masterClock
+ }
+ }
+ #endif
+
+ @available(tvOS 17.0, *)
+ func isMultitaskingCameraAccessEnabled(_ session: AVCaptureSession) -> Bool {
+ if #available(iOS 16.0, tvOS 17.0, *) {
+ session.isMultitaskingCameraAccessEnabled
+ } else {
+ false
+ }
+ }
+
+ @available(tvOS 17.0, *)
+ func makeSession(_ sessionPreset: AVCaptureSession.Preset) -> AVCaptureSession {
+ let session: AVCaptureSession
+ if isMultiCamSessionEnabled {
+ session = AVCaptureMultiCamSession()
+ } else {
+ session = AVCaptureSession()
+ }
+ if session.canSetSessionPreset(sessionPreset) {
+ session.sessionPreset = sessionPreset
+ }
+ return session
+ }
+ }
+ #else
+ struct Capabilities {
+ static let isMultiCamSupported = false
+
+ var isMultiCamSessionEnabled = false {
+ didSet {
+ isMultiCamSessionEnabled = false
+ }
+ }
+
+ func synchronizationClock(_ session: AVCaptureSession) -> CMClock? {
+ return session.synchronizationClock
+ }
+
+ func isMultitaskingCameraAccessEnabled(_ session: AVCaptureSession) -> Bool {
+ false
+ }
+ }
+ #endif
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/CaptureSession.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/CaptureSession.swift
new file mode 100644
index 000000000..b8f7be12d
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/CaptureSession.swift
@@ -0,0 +1,365 @@
+import AVFoundation
+
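+/// Abstraction over AVCaptureSession so the mixer can run against a real capture
+/// session or the NullCaptureSession stand-in on platforms without capture.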
+protocol CaptureSessionConvertible: Runner {
+ #if !os(visionOS)
+ @available(tvOS 17.0, *)
+ var sessionPreset: AVCaptureSession.Preset { get set }
+ #endif
+
+ var isCapturing: Bool { get }
+    var isInterrupted: AsyncStream<Bool> { get }
+    var runtimeError: AsyncStream<AVError> { get }
+ var synchronizationClock: CMClock? { get }
+ var isMultiCamSessionEnabled: Bool { get set }
+
+ @available(tvOS 17.0, *)
+ var isMultitaskingCameraAccessEnabled: Bool { get }
+
+ @available(tvOS 17.0, *)
+ func attachCapture(_ capture: (any DeviceUnit)?)
+ @available(tvOS 17.0, *)
+ func detachCapture(_ capture: (any DeviceUnit)?)
+ @available(tvOS 17.0, *)
+ func configuration(_ lambda: (_ session: AVCaptureSession) throws -> Void) rethrows
+ @available(tvOS 17.0, *)
+ func startRunningIfNeeded()
+}
+
+#if os(macOS) || os(iOS) || os(visionOS)
+final class CaptureSession {
+ var isMultiCamSessionEnabled: Bool {
+ get {
+ capabilities.isMultiCamSessionEnabled
+ }
+ set {
+ capabilities.isMultiCamSessionEnabled = newValue
+ }
+ }
+
+ var isCapturing: Bool {
+ session.isRunning
+ }
+
+ var isMultitaskingCameraAccessEnabled: Bool {
+ capabilities.isMultitaskingCameraAccessEnabled(session)
+ }
+
+ @AsyncStreamedFlow
+    var isInterrupted: AsyncStream<Bool>
+
+    @AsyncStreamedFlow
+    var runtimeError: AsyncStream<AVError>
+
+ var synchronizationClock: CMClock? {
+ capabilities.synchronizationClock(session)
+ }
+
+ private(set) var isRunning = false
+
+ #if !os(visionOS)
+ var sessionPreset: AVCaptureSession.Preset = .default {
+ didSet {
+ guard sessionPreset != oldValue, session.canSetSessionPreset(sessionPreset) else {
+ return
+ }
+ session.beginConfiguration()
+ session.sessionPreset = sessionPreset
+ session.commitConfiguration()
+ }
+ }
+ private(set) lazy var session: AVCaptureSession = capabilities.makeSession(sessionPreset)
+ #else
+ private(set) lazy var session = AVCaptureSession()
+ #endif
+
+ private lazy var capabilities = Capabilities()
+
+ deinit {
+ if session.isRunning {
+ session.stopRunning()
+ }
+ }
+}
+#elseif os(tvOS)
+final class CaptureSession {
+ var isMultiCamSessionEnabled: Bool {
+ get {
+ capabilities.isMultiCamSessionEnabled
+ }
+ set {
+ capabilities.isMultiCamSessionEnabled = newValue
+ }
+ }
+
+ var isCapturing: Bool {
+ if #available(tvOS 17.0, *) {
+ session.isRunning
+ } else {
+ false
+ }
+ }
+
+ var isMultitaskingCameraAccessEnabled: Bool {
+ if #available(tvOS 17.0, *) {
+ capabilities.isMultitaskingCameraAccessEnabled(session)
+ } else {
+ false
+ }
+ }
+
+ @AsyncStreamedFlow
+    var isInterrupted: AsyncStream<Bool>
+
+    @AsyncStreamedFlow
+    var runtimeError: AsyncStream<AVError>
+
+ var synchronizationClock: CMClock? {
+ if #available(tvOS 17.0, *) {
+ return session.synchronizationClock
+ } else {
+ return nil
+ }
+ }
+
+ private(set) var isRunning = false
+
+ private var _session: Any?
+ /// The capture session instance.
+ @available(tvOS 17.0, *)
+ var session: AVCaptureSession {
+ if _session == nil {
+ _session = capabilities.makeSession(sessionPreset)
+ }
+ return _session as! AVCaptureSession
+ }
+
+ private var _sessionPreset: Any?
+ @available(tvOS 17.0, *)
+ var sessionPreset: AVCaptureSession.Preset {
+ get {
+ if _sessionPreset == nil {
+ _sessionPreset = AVCaptureSession.Preset.default
+ }
+ return _sessionPreset as! AVCaptureSession.Preset
+ }
+ set {
+ guard sessionPreset != newValue, session.canSetSessionPreset(newValue) else {
+ return
+ }
+ session.beginConfiguration()
+ session.sessionPreset = newValue
+ session.commitConfiguration()
+ }
+ }
+
+ private lazy var capabilities = Capabilities()
+
+ deinit {
+ guard #available(tvOS 17.0, *) else {
+ return
+ }
+ if session.isRunning {
+ session.stopRunning()
+ }
+ }
+}
+#endif
+
+extension CaptureSession: CaptureSessionConvertible {
+ // MARK: CaptureSessionConvertible
+ @available(tvOS 17.0, *)
+ func configuration(_ lambda: (_ session: AVCaptureSession) throws -> Void) rethrows {
+ session.beginConfiguration()
+ defer {
+ session.commitConfiguration()
+ }
+ try lambda(session)
+ }
+
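+    /// Adds the unit to the session. A unit that carries its own AVCaptureConnection
+    /// has its input and output added without implicit connections so the explicit
+    /// connection can be installed instead.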
+ @available(tvOS 17.0, *)
+ func attachCapture(_ capture: (any DeviceUnit)?) {
+ guard let capture else {
+ return
+ }
+ #if !os(visionOS)
+ if let connection = capture.connection {
+ if let input = capture.input, session.canAddInput(input) {
+ session.addInputWithNoConnections(input)
+ }
+ if let output = capture.output, session.canAddOutput(output) {
+ session.addOutputWithNoConnections(output)
+ }
+ if session.canAddConnection(connection) {
+ session.addConnection(connection)
+ }
+ return
+ }
+ #endif
+ if let input = capture.input, session.canAddInput(input) {
+ session.addInput(input)
+ }
+ if let output = capture.output, session.canAddOutput(output) {
+ session.addOutput(output)
+ }
+ }
+
+ @available(tvOS 17.0, *)
+ func detachCapture(_ capture: (any DeviceUnit)?) {
+ guard let capture else {
+ return
+ }
+ #if !os(visionOS)
+ if let connection = capture.connection {
+ if capture.output?.connections.contains(connection) == true {
+ session.removeConnection(connection)
+ }
+ }
+ #endif
+ if let input = capture.input, session.inputs.contains(input) {
+ session.removeInput(input)
+ }
+ if let output = capture.output, session.outputs.contains(output) {
+ session.removeOutput(output)
+ }
+ }
+
+ @available(tvOS 17.0, *)
+ func startRunningIfNeeded() {
+ guard isRunning && !session.isRunning else {
+ return
+ }
+ session.startRunning()
+ isRunning = session.isRunning
+ }
+
+ @available(tvOS 17.0, *)
+ private func addSessionObservers(_ session: AVCaptureSession) {
+ NotificationCenter.default.addObserver(self, selector: #selector(sessionRuntimeError(_:)), name: .AVCaptureSessionRuntimeError, object: session)
+ #if os(iOS) || os(tvOS) || os(visionOS)
+ NotificationCenter.default.addObserver(self, selector: #selector(sessionInterruptionEnded(_:)), name: .AVCaptureSessionInterruptionEnded, object: session)
+ NotificationCenter.default.addObserver(self, selector: #selector(sessionWasInterrupted(_:)), name: .AVCaptureSessionWasInterrupted, object: session)
+ #endif
+ }
+
+ @available(tvOS 17.0, *)
+ private func removeSessionObservers(_ session: AVCaptureSession) {
+ #if os(iOS) || os(tvOS) || os(visionOS)
+ NotificationCenter.default.removeObserver(self, name: .AVCaptureSessionWasInterrupted, object: session)
+ NotificationCenter.default.removeObserver(self, name: .AVCaptureSessionInterruptionEnded, object: session)
+ #endif
+ NotificationCenter.default.removeObserver(self, name: .AVCaptureSessionRuntimeError, object: session)
+ _runtimeError.finish()
+ }
+
+ @available(tvOS 17.0, *)
+ @objc
+ private func sessionRuntimeError(_ notification: NSNotification) {
+ guard
+ let errorValue = notification.userInfo?[AVCaptureSessionErrorKey] as? NSError else {
+ return
+ }
+ _runtimeError.yield(AVError(_nsError: errorValue))
+ }
+
+ #if os(iOS) || os(tvOS) || os(visionOS)
+ @available(tvOS 17.0, *)
+ @objc
+ private func sessionWasInterrupted(_ notification: Notification) {
+ _isInturreped.yield(true)
+ }
+
+ @available(tvOS 17.0, *)
+ @objc
+ private func sessionInterruptionEnded(_ notification: Notification) {
+ _isInturreped.yield(false)
+ }
+ #endif
+}
+
+extension CaptureSession: Runner {
+ // MARK: Runner
+ func startRunning() {
+ guard !isRunning else {
+ return
+ }
+ if #available(tvOS 17.0, *) {
+ addSessionObservers(session)
+ session.startRunning()
+ isRunning = session.isRunning
+ } else {
+ isRunning = true
+ }
+ }
+
+ func stopRunning() {
+ guard isRunning else {
+ return
+ }
+ if #available(tvOS 17.0, *) {
+ removeSessionObservers(session)
+ session.stopRunning()
+ isRunning = session.isRunning
+ } else {
+ isRunning = false
+ }
+ }
+}
+
+final class NullCaptureSession: CaptureSessionConvertible {
+ #if !os(visionOS)
+ @available(tvOS 17.0, *)
+ var sessionPreset: AVCaptureSession.Preset {
+ get {
+ return .default
+ }
+ set {
+ }
+ }
+ #endif
+
+ let isCapturing: Bool = false
+ var isMultiCamSessionEnabled = false
+ let isMultitaskingCameraAccessEnabled = false
+ let synchronizationClock: CMClock? = nil
+
+ @AsyncStreamed(false)
+ var isInturreped: AsyncStream<Bool>
+
+ @AsyncStreamedFlow
+ var runtimeError: AsyncStream<AVError>
+
+ private(set) var isRunning = false
+
+ @available(tvOS 17.0, *)
+ func attachCapture(_ capture: (any DeviceUnit)?) {
+ }
+
+ @available(tvOS 17.0, *)
+ func detachCapture(_ capture: (any DeviceUnit)?) {
+ }
+
+ @available(tvOS 17.0, *)
+ func configuration(_ lambda: (AVCaptureSession) throws -> Void) rethrows {
+ }
+
+ func startRunningIfNeeded() {
+ }
+}
+
+extension NullCaptureSession: Runner {
+ // MARK: Runner
+ func startRunning() {
+ guard !isRunning else {
+ return
+ }
+ isRunning = true
+ }
+
+ func stopRunning() {
+ guard isRunning else {
+ return
+ }
+ _runtimeError.finish()
+ isRunning = false
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/CaptureUnit.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/CaptureUnit.swift
new file mode 100644
index 000000000..6a959f7ee
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/CaptureUnit.swift
@@ -0,0 +1,15 @@
+import AVFAudio
+import Foundation
+
+protocol CaptureUnit {
+ var lockQueue: DispatchQueue { get }
+ var isSuspended: Bool { get }
+
+ @available(tvOS 17.0, *)
+ func suspend()
+
+ @available(tvOS 17.0, *)
+ func resume()
+
+ func finish()
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/DeviceUnit.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/DeviceUnit.swift
new file mode 100644
index 000000000..1d6aa414a
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/DeviceUnit.swift
@@ -0,0 +1,15 @@
+import AVFoundation
+import Foundation
+
+@available(tvOS 17.0, *)
+protocol DeviceUnit {
+ associatedtype Output: AVCaptureOutput
+
+ var track: UInt8 { get }
+ var input: AVCaptureInput? { get }
+ var output: Output? { get }
+ var device: AVCaptureDevice? { get }
+ var connection: AVCaptureConnection? { get }
+
+ init(_ track: UInt8, device: AVCaptureDevice) throws
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/DynamicRangeMode.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/DynamicRangeMode.swift
new file mode 100644
index 000000000..ccd90c2aa
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/DynamicRangeMode.swift
@@ -0,0 +1,108 @@
+import AVFoundation
+import CoreImage
+
+/// Defines the dynamic range mode used for rendering or video processing.
+///
+/// - Note: Live streaming is **not yet supported** when using HDR mode.
+public enum DynamicRangeMode: Sendable {
+ private static let colorSpaceITUR709 = CGColorSpace(name: CGColorSpace.itur_709)
+ private static let colorSpaceITUR2100 = CGColorSpace(name: CGColorSpace.itur_2100_HLG)
+
+ /// Standard Dynamic Range (SDR) mode.
+ /// Uses the sRGB color space and standard luminance range.
+ case sdr
+
+ /// High Dynamic Range (HDR) mode.
+ /// Uses the ITU-R BT.2100 HLG color space for wide color gamut and extended brightness.
+ case hdr
+
+ var videoFormat: OSType {
+ switch self {
+ case .sdr:
+ return kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+ case .hdr:
+ return kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange
+ }
+ }
+
+ var colorSpace: CGColorSpace? {
+ switch self {
+ case .sdr:
+ return DynamicRangeMode.colorSpaceITUR709
+ case .hdr:
+ return DynamicRangeMode.colorSpaceITUR2100
+ }
+ }
+
+ private var contextOptions: [CIContextOption: Any]? {
+ guard let colorSpace else {
+ return nil
+ }
+ return [
+ .workingFormat: CIFormat.RGBAh.rawValue,
+ .workingColorSpace: colorSpace,
+ .outputColorSpace: colorSpace
+ ]
+ }
+
+ private var pixelFormat: OSType {
+ switch self {
+ case .sdr:
+ return kCVPixelFormatType_32ARGB
+ case .hdr:
+ return kCVPixelFormatType_64RGBAHalf
+ }
+ }
+
+ func attach(_ pixelBuffer: CVPixelBuffer) {
+ switch self {
+ case .sdr:
+ break
+ case .hdr:
+ CVBufferSetAttachment(
+ pixelBuffer,
+ kCVImageBufferColorPrimariesKey,
+ kCVImageBufferColorPrimaries_ITU_R_2020,
+ .shouldPropagate
+ )
+ CVBufferSetAttachment(
+ pixelBuffer,
+ kCVImageBufferTransferFunctionKey,
+ kCVImageBufferTransferFunction_ITU_R_2100_HLG,
+ .shouldPropagate
+ )
+ CVBufferSetAttachment(
+ pixelBuffer,
+ kCVImageBufferYCbCrMatrixKey,
+ kCVImageBufferYCbCrMatrix_ITU_R_2020,
+ .shouldPropagate
+ )
+ }
+ }
+
+ func makeCIContext() -> CIContext {
+ guard let device = MTLCreateSystemDefaultDevice() else {
+ return CIContext(options: contextOptions)
+ }
+ return CIContext(mtlDevice: device, options: contextOptions)
+ }
+
+ func makePixelBufferAttributes(_ size: CGSize) -> CFDictionary {
+ switch self {
+ case .sdr:
+ return [
+ kCVPixelBufferPixelFormatTypeKey: NSNumber(value: pixelFormat),
+ kCVPixelBufferMetalCompatibilityKey: kCFBooleanTrue,
+ kCVPixelBufferWidthKey: NSNumber(value: Int(size.width)),
+ kCVPixelBufferHeightKey: NSNumber(value: Int(size.height))
+ ] as CFDictionary
+ case .hdr:
+ return [
+ kCVPixelBufferPixelFormatTypeKey: NSNumber(value: videoFormat),
+ kCVPixelBufferMetalCompatibilityKey: kCFBooleanTrue,
+ kCVPixelBufferWidthKey: NSNumber(value: Int(size.width)),
+ kCVPixelBufferHeightKey: NSNumber(value: Int(size.height))
+ ] as CFDictionary
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/MediaMixer.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/MediaMixer.swift
new file mode 100644
index 000000000..707cb65c7
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/MediaMixer.swift
@@ -0,0 +1,567 @@
+@preconcurrency import AVFoundation
+
+#if canImport(UIKit)
+import UIKit
+#endif
+
+/// An actor that mixes audio and video for streaming.
+public final actor MediaMixer {
+ static let defaultFrameRate: Float64 = 30
+
+ /// The error domain codes.
+ public enum Error: Swift.Error {
+ /// The mixer failed to attach a device.
+ case failedToAttach(_ error: any Swift.Error)
+ /// The mixer is missing a device for the given track.
+ case deviceNotFound
+ }
+
+ /// An enumeration that defines the capture session mode used for video/audio input.
+ public enum CaptureSessionMode: Sendable {
+ /// Uses a standard `AVCaptureSession`.
+ case single
+ /// Uses an `AVCaptureMultiCamSession`.
+ case multi
+ /// Does not use an `AVCaptureSession`. Set this when using ReplayKit, as an AVCaptureSession is not required.
+ case manual
+
+ func makeSession() -> (any CaptureSessionConvertible) {
+ switch self {
+ case .single:
+ let session = CaptureSession()
+ session.isMultiCamSessionEnabled = false
+ return session
+ case .multi:
+ let session = CaptureSession()
+ session.isMultiCamSessionEnabled = true
+ return session
+ case .manual:
+ return NullCaptureSession()
+ }
+ }
+ }
+
+ /// The offscreen rendering object.
+ @ScreenActor
+ public private(set) lazy var screen = Screen()
+
+ /// The capture session mode.
+ public let captureSessionMode: CaptureSessionMode
+
+ /// The feature to mix multiple audio tracks. For example, it is possible to mix .appAudio and .micAudio from ReplayKit.
+ public let isMultiTrackAudioMixingEnabled: Bool
+
+ /// The dynamic range mode.
+ public private(set) var dynamicRangeMode: DynamicRangeMode = .sdr
+
+ #if os(iOS) || os(tvOS)
+ /// Indicates whether the AVCaptureMultiCamSession is enabled.
+ @available(tvOS 17.0, *)
+ public var isMultiCamSessionEnabled: Bool {
+ session.isMultiCamSessionEnabled
+ }
+ #endif
+
+ #if os(iOS) || os(macOS) || os(tvOS)
+ /// Indicates whether the device torch is turned on (true) or off (false).
+ public var isTorchEnabled: Bool {
+ videoIO.isTorchEnabled
+ }
+
+ /// The sessionPreset for the AVCaptureSession.
+ @available(tvOS 17.0, *)
+ public var sessionPreset: AVCaptureSession.Preset {
+ session.sessionPreset
+ }
+ #endif
+
+ /// Indicates whether audio monitoring is enabled.
+ public var isMonitoringEnabled: Bool {
+ audioIO.isMonitoringEnabled
+ }
+
+ /// The audio mixer settings.
+ public var audioMixerSettings: AudioMixerSettings {
+ audioIO.mixerSettings
+ }
+
+ /// The video mixer settings.
+ public var videoMixerSettings: VideoMixerSettings {
+ videoIO.mixerSettings
+ }
+
+ /// The audio input formats.
+ public var audioInputFormats: [UInt8: AVAudioFormat] {
+ audioIO.inputFormats
+ }
+
+ /// The video input formats.
+ public var videoInputFormats: [UInt8: CMFormatDescription] {
+ videoIO.inputFormats
+ }
+
+ /// The output frame rate.
+ public private(set) var frameRate = MediaMixer.defaultFrameRate
+
+ /// Indicates whether the AVCaptureSession is running.
+ @available(tvOS 17.0, *)
+ public var isCapturing: Bool {
+ session.isCapturing
+ }
+
+ /// An asynchronous stream of interruption events (true while interrupted).
+ public var isInterputted: AsyncStream<Bool> {
+ session.isInturreped
+ }
+
+ #if os(iOS) || os(macOS)
+ /// The video orientation for stream.
+ public var videoOrientation: AVCaptureVideoOrientation {
+ videoIO.videoOrientation
+ }
+ #endif
+
+ public private(set) var isRunning = false
+
+ private var outputs: [any MediaMixerOutput] = []
+ private var subscriptions: [Task<Void, Never>] = []
+ private var isInBackground = false
+ private lazy var audioIO = AudioCaptureUnit(session, isMultiTrackAudioMixingEnabled: isMultiTrackAudioMixingEnabled)
+ private lazy var videoIO = VideoCaptureUnit(session)
+ private lazy var session: (any CaptureSessionConvertible) = captureSessionMode.makeSession()
+ @ScreenActor
+ private lazy var displayLink = DisplayLinkChoreographer()
+
+ /// Creates a new instance.
+ ///
+ /// - Parameters:
+ /// - captureSessionMode: Specifies the capture session mode.
+ /// - multiTrackAudioMixingEnabled: Specifies the feature to mix multiple audio tracks. For example, it is possible to mix .appAudio and .micAudio from ReplayKit.
+ public init(
+ captureSessionMode: CaptureSessionMode = .single,
+ multiTrackAudioMixingEnabled: Bool = false
+ ) {
+ self.captureSessionMode = captureSessionMode
+ self.isMultiTrackAudioMixingEnabled = multiTrackAudioMixingEnabled
+ }
+
+ /// Attaches a video device.
+ ///
+ /// If you want to use the multi-camera feature on iOS, please create a MediaMixer with the multi capture session mode.
+ /// ```swift
+ /// let mixer = MediaMixer(captureSessionMode: .multi)
+ /// ```
+ @available(tvOS 17.0, *)
+ public func attachVideo(_ device: AVCaptureDevice?, track: UInt8 = 0, configuration: VideoDeviceConfigurationBlock? = nil) async throws {
+ return try await withCheckedThrowingContinuation { continuation in
+ do {
+ try videoIO.attachVideo(track, device: device, configuration: configuration)
+ continuation.resume()
+ } catch {
+ continuation.resume(throwing: Error.failedToAttach(error))
+ }
+ }
+ }
+
+ /// Configurations for a video device.
+ @available(tvOS 17.0, *)
+ public func configuration(video track: UInt8, configuration: VideoDeviceConfigurationBlock) throws {
+ guard let unit = videoIO.devices[track] else {
+ throw Error.deviceNotFound
+ }
+ try configuration(unit)
+ }
+
+ #if os(iOS) || os(macOS) || os(tvOS)
+ /// Attaches an audio device.
+ ///
+ /// - Attention: You can perform multi-microphone capture on macOS as shown below. Unfortunately, it seems that only one microphone is available on iOS.
+ ///
+ /// ```swift
+ /// let mixer = MediaMixer(multiTrackAudioMixingEnabled: true)
+ ///
+ /// var audios = AVCaptureDevice.devices(for: .audio)
+ /// if !audios.isEmpty {
+ /// try await mixer.attachAudio(audios.removeFirst(), track: 0)
+ /// }
+ /// if !audios.isEmpty {
+ /// try await mixer.attachAudio(audios.removeFirst(), track: 1)
+ /// }
+ /// ```
+ @available(tvOS 17.0, *)
+ public func attachAudio(_ device: AVCaptureDevice?, track: UInt8 = 0, configuration: AudioDeviceConfigurationBlock? = nil) async throws {
+ return try await withCheckedThrowingContinuation { continuation in
+ do {
+ try audioIO.attachAudio(track, device: device, configuration: configuration)
+ continuation.resume()
+ } catch {
+ continuation.resume(throwing: Error.failedToAttach(error))
+ }
+ }
+ }
+
+ /// Configurations for an audio device.
+ @available(tvOS 17.0, *)
+ public func configuration(audio track: UInt8, configuration: AudioDeviceConfigurationBlock) throws {
+ guard let unit = audioIO.devices[track] else {
+ throw Error.deviceNotFound
+ }
+ try configuration(unit)
+ }
+
+ /// Sets whether the device torch is turned on (true) or off (false).
+ public func setTorchEnabled(_ torchEnabled: Bool) {
+ videoIO.isTorchEnabled = torchEnabled
+ }
+
+ /// Sets the sessionPreset for the AVCaptureSession.
+ @available(tvOS 17.0, *)
+ public func setSessionPreset(_ sessionPreset: AVCaptureSession.Preset) {
+ session.sessionPreset = sessionPreset
+ }
+ #endif
+
+ #if os(iOS) || os(macOS)
+ /// Sets the video orientation for stream.
+ public func setVideoOrientation(_ videoOrientation: AVCaptureVideoOrientation) {
+ videoIO.videoOrientation = videoOrientation
+ // https://github.com/shogo4405/HaishinKit.swift/issues/190
+ if videoIO.isTorchEnabled {
+ videoIO.isTorchEnabled = true
+ }
+ }
+ #endif
+
+ /// Appends a CMSampleBuffer.
+ /// - Parameters:
+ /// - sampleBuffer: The sample buffer to append.
+ /// - track: The track number used for mixing.
+ public func append(_ sampleBuffer: CMSampleBuffer, track: UInt8 = 0) {
+ switch sampleBuffer.formatDescription?.mediaType {
+ case .audio?:
+ audioIO.append(track, buffer: sampleBuffer)
+ case .video?:
+ videoIO.append(track, buffer: sampleBuffer)
+ default:
+ break
+ }
+ }
+
+ /// Sets the video mixer settings.
+ public func setVideoMixerSettings(_ settings: VideoMixerSettings) {
+ let mode = self.videoMixerSettings.mode
+ if mode != settings.mode {
+ setVideoRenderingMode(settings.mode)
+ }
+ videoIO.mixerSettings = settings
+ Task { @ScreenActor in
+ screen.videoTrackScreenObject.track = settings.mainTrack
+ }
+ }
+
+ /// Sets the output frame rate of the mixer.
+ ///
+ /// This is distinct from the camera capture rate, which can be configured separately as shown below.
+ /// ```swift
+ /// try? await mixer.configuration(video: 0) { video in
+ /// try? video.setFrameRate(fps)
+ /// }
+ /// ```
+ public func setFrameRate(_ frameRate: Float64) throws {
+ switch videoMixerSettings.mode {
+ case .passthrough:
+ if #available(tvOS 17.0, *) {
+ try videoIO.devices.first?.value.setFrameRate(frameRate)
+ }
+ case .offscreen:
+ Task { @ScreenActor in
+ displayLink.preferredFramesPerSecond = Int(frameRate)
+ }
+ }
+ self.frameRate = frameRate
+ }
+
+ /// Sets the dynamic range mode.
+ ///
+ /// - Warning: It takes some time for changes to be applied to the camera device, so it’s better not to modify it dynamically during a live stream.
+ public func setDynamicRangeMode(_ dynamicRangeMode: DynamicRangeMode) throws {
+ guard self.dynamicRangeMode != dynamicRangeMode else {
+ return
+ }
+ Task { @ScreenActor in
+ screen.dynamicRangeMode = dynamicRangeMode
+ }
+ videoIO.dynamicRangeMode = dynamicRangeMode
+ self.dynamicRangeMode = dynamicRangeMode
+ }
+
+ /// Sets the audio mixer settings.
+ public func setAudioMixerSettings(_ settings: AudioMixerSettings) {
+ audioIO.mixerSettings = settings
+ }
+
+ /// Sets the audio monitoring enabled or not.
+ public func setMonitoringEnabled(_ monitoringEnabled: Bool) {
+ audioIO.isMonitoringEnabled = monitoringEnabled
+ }
+
+ /// Starts capturing from input devices.
+ ///
+ /// Internally, it is called either when the view is attached or just before publishing. In other cases, please call this method if you want to manually start the capture.
+ @available(tvOS 17.0, *)
+ public func startCapturing() {
+ guard !session.isRunning else {
+ session.startRunningIfNeeded()
+ return
+ }
+ session.startRunning()
+ let synchronizationClock = session.synchronizationClock
+ Task { @ScreenActor in
+ screen.synchronizationClock = synchronizationClock
+ }
+ Task {
+ for await runtimeError in session.runtimeError {
+ await sessionRuntimeErrorOccured(runtimeError)
+ }
+ }
+ }
+
+ /// Stops capturing from input devices.
+ @available(tvOS 17.0, *)
+ public func stopCapturing() {
+ guard session.isRunning else {
+ return
+ }
+ session.stopRunning()
+ Task { @ScreenActor in
+ screen.synchronizationClock = nil
+ }
+ }
+
+ /// Appends an AVAudioBuffer.
+ /// - Parameters:
+ /// - audioBuffer: The audio buffer to append.
+ /// - when: The audio time to append.
+ /// - track: Track number used for mixing.
+ public func append(_ audioBuffer: AVAudioBuffer, when: AVAudioTime, track: UInt8 = 0) {
+ audioIO.append(track, buffer: audioBuffer, when: when)
+ }
+
+ /// Configurations for the AVCaptureSession.
+ /// - Attention: There is no need for developers to call beginConfiguration() and commitConfiguration(), as they are invoked automatically.
+ @available(tvOS 17.0, *)
+ public func configuration(_ lambda: @Sendable (_ session: AVCaptureSession) throws -> Void) rethrows {
+ try session.configuration(lambda)
+ }
+
+ /// Adds an output observer.
+ public func addOutput(_ output: some MediaMixerOutput) {
+ guard !outputs.contains(where: { $0 === output }) else {
+ return
+ }
+ outputs.append(output)
+ }
+
+ /// Removes an output observer.
+ public func removeOutput(_ output: some MediaMixerOutput) {
+ if let index = outputs.firstIndex(where: { $0 === output }) {
+ outputs.remove(at: index)
+ }
+ }
+
+ private func setVideoRenderingMode(_ mode: VideoMixerSettings.Mode) {
+ guard isRunning else {
+ return
+ }
+ switch mode {
+ case .passthrough:
+ Task { @ScreenActor in
+ displayLink.stopRunning()
+ }
+ case .offscreen:
+ Task { @ScreenActor in
+ displayLink.preferredFramesPerSecond = await Int(frameRate)
+ displayLink.startRunning()
+ for await updateFrame in displayLink.updateFrames {
+ guard let buffer = screen.makeSampleBuffer(updateFrame) else {
+ continue
+ }
+ for output in await self.outputs where await output.videoTrackId == UInt8.max {
+ output.mixer(self, didOutput: buffer)
+ }
+ }
+ }
+ }
+ }
+
+ #if os(iOS) || os(tvOS) || os(visionOS)
+ private func setInBackground(_ isInBackground: Bool) {
+ self.isInBackground = isInBackground
+ guard #available(tvOS 17.0, *), !session.isMultitaskingCameraAccessEnabled else {
+ return
+ }
+ if isInBackground {
+ videoIO.suspend()
+ } else {
+ videoIO.resume()
+ session.startRunningIfNeeded()
+ }
+ }
+
+ @available(tvOS 17.0, *)
+ private func didAudioSessionInterruption(_ notification: Notification) {
+ guard
+ let userInfo = notification.userInfo,
+ let typeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
+ let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
+ return
+ }
+ switch type {
+ case .began:
+ // video capture continues even while an incoming call is ringing.
+ audioIO.suspend()
+ session.startRunningIfNeeded()
+ logger.info("Audio suspended due to system interruption.")
+ case .ended:
+ let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt
+ let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue ?? 0)
+ if options.contains(.shouldResume) {
+ audioIO.resume()
+ }
+ logger.info("Audio resumed after system interruption")
+ default: ()
+ }
+ }
+ #endif
+
+ @available(tvOS 17.0, *)
+ private func sessionRuntimeErrorOccured(_ error: AVError) async {
+ switch error.code {
+ #if os(iOS) || os(tvOS) || os(visionOS)
+ case .mediaServicesWereReset:
+ session.startRunningIfNeeded()
+ #endif
+ #if os(iOS) || os(tvOS) || os(macOS)
+ case .unsupportedDeviceActiveFormat:
+ guard let device = error.device, let format = device.videoFormat(
+ width: session.sessionPreset.width ?? Int32.max,
+ height: session.sessionPreset.height ?? Int32.max,
+ frameRate: frameRate,
+ isMultiCamSupported: session.isMultiCamSessionEnabled
+ ), device.activeFormat != format else {
+ return
+ }
+ do {
+ try device.lockForConfiguration()
+ device.activeFormat = format
+ if format.isFrameRateSupported(frameRate) {
+ device.activeVideoMinFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * frameRate))
+ device.activeVideoMaxFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * frameRate))
+ }
+ device.unlockForConfiguration()
+ session.startRunningIfNeeded()
+ } catch {
+ logger.warn(error)
+ }
+ #endif
+ case .unknown:
+ // AVFoundationErrorDomain Code=-11800 "The operation could not be completed"
+ if error.errorCode == -11800 && !isInBackground {
+ session.startRunningIfNeeded()
+ }
+ default:
+ break
+ }
+ }
+}
+
+extension MediaMixer: AsyncRunner {
+ // MARK: AsyncRunner
+ public func startRunning() async {
+ guard !isRunning else {
+ return
+ }
+ isRunning = true
+ setVideoRenderingMode(videoMixerSettings.mode)
+ if #available(tvOS 17.0, *) {
+ startCapturing()
+ }
+ Task {
+ for await inputs in videoIO.inputs {
+ Task { @ScreenActor in
+ let videoMixerSettings = await self.videoMixerSettings
+ guard videoMixerSettings.mode == .offscreen else {
+ return
+ }
+ let sampleBuffer = inputs.1
+ screen.append(inputs.0, buffer: sampleBuffer)
+ if videoMixerSettings.mainTrack == inputs.0 {
+ screen.setVideoCaptureLatency(sampleBuffer.presentationTimeStamp)
+ }
+ }
+ for output in outputs where await output.videoTrackId == inputs.0 {
+ output.mixer(self, didOutput: inputs.1)
+ }
+ }
+ }
+ Task {
+ for await video in videoIO.output {
+ for output in outputs where await output.videoTrackId == UInt8.max {
+ output.mixer(self, didOutput: video)
+ }
+ }
+ }
+ Task {
+ for await audio in audioIO.output {
+ for output in outputs where await output.audioTrackId == UInt8.max {
+ output.mixer(self, didOutput: audio.0, when: audio.1)
+ }
+ }
+ }
+ #if os(iOS) || os(tvOS) || os(visionOS)
+ subscriptions.append(Task {
+ for await _ in NotificationCenter.default.notifications(
+ named: UIApplication.didEnterBackgroundNotification
+ ) {
+ setInBackground(true)
+ }
+ })
+ subscriptions.append(Task {
+ for await _ in NotificationCenter.default.notifications(
+ named: UIApplication.willEnterForegroundNotification
+ ) {
+ setInBackground(false)
+ }
+ })
+ if #available(tvOS 17.0, *) {
+ subscriptions.append(Task {
+ for await notification in NotificationCenter.default.notifications(
+ named: AVAudioSession.interruptionNotification,
+ object: AVAudioSession.sharedInstance()
+ ) {
+ didAudioSessionInterruption(notification)
+ }
+ })
+ }
+ #endif
+ }
+
+ public func stopRunning() async {
+ guard isRunning else {
+ return
+ }
+ if #available(tvOS 17.0, *) {
+ stopCapturing()
+ }
+ audioIO.finish()
+ videoIO.finish()
+ subscriptions.forEach { $0.cancel() }
+ subscriptions.removeAll()
+ // Wait for the task to finish to prevent memory leaks.
+ await Task { @ScreenActor in
+ displayLink.stopRunning()
+ screen.reset()
+ }.value
+ isRunning = false
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/MediaMixerOutput.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/MediaMixerOutput.swift
new file mode 100644
index 000000000..a8681dc9d
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/MediaMixerOutput.swift
@@ -0,0 +1,15 @@
+import AVFoundation
+
+/// A delegate protocol implemented to receive stream output events.
+public protocol MediaMixerOutput: AnyObject, Sendable {
+ /// The video track id that the receiver consumes.
+ var videoTrackId: UInt8? { get async }
+ /// The audio track id that the receiver consumes.
+ var audioTrackId: UInt8? { get async }
+ /// Tells the receiver that a video buffer is incoming.
+ func mixer(_ mixer: MediaMixer, didOutput sampleBuffer: CMSampleBuffer)
+ /// Tells the receiver that an audio buffer is incoming.
+ func mixer(_ mixer: MediaMixer, didOutput buffer: AVAudioPCMBuffer, when: AVAudioTime)
+ /// Selects the track id for streaming.
+ func selectTrack(_ id: UInt8?, mediaType: CMFormatDescription.MediaType) async
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/VideoCaptureUnit.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/VideoCaptureUnit.swift
new file mode 100644
index 000000000..4f298c308
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/VideoCaptureUnit.swift
@@ -0,0 +1,182 @@
+import AVFoundation
+import CoreImage
+
+final class VideoCaptureUnit: CaptureUnit {
+ enum Error: Swift.Error {
+ case multiCamNotSupported
+ }
+
+ let lockQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.VideoCaptureUnit.lock")
+
+ private(set) var isSuspended = false
+
+ var mixerSettings: VideoMixerSettings {
+ get {
+ return videoMixer.settings
+ }
+ set {
+ videoMixer.settings = newValue
+ }
+ }
+
+ var inputFormats: [UInt8: CMFormatDescription] {
+ return videoMixer.inputFormats
+ }
+
+ #if os(iOS) || os(tvOS) || os(macOS)
+ var isTorchEnabled = false {
+ didSet {
+ guard #available(tvOS 17.0, *) else {
+ return
+ }
+ setTorchMode(isTorchEnabled ? .on : .off)
+ }
+ }
+ #endif
+
+ @available(tvOS 17.0, *)
+ var hasDevice: Bool {
+ !devices.lazy.filter { $0.value.device != nil }.isEmpty
+ }
+
+ #if os(iOS) || os(macOS)
+ var videoOrientation: AVCaptureVideoOrientation = .portrait {
+ didSet {
+ guard videoOrientation != oldValue else {
+ return
+ }
+ session.configuration { _ in
+ for capture in devices.values {
+ capture.videoOrientation = videoOrientation
+ }
+ }
+ }
+ }
+ #endif
+
+ @AsyncStreamedFlow
+ var inputs: AsyncStream<(UInt8, CMSampleBuffer)>
+
+ @AsyncStreamedFlow
+ var output: AsyncStream<CMSampleBuffer>
+
+ var dynamicRangeMode: DynamicRangeMode = .sdr {
+ didSet {
+ guard dynamicRangeMode != oldValue, #available(tvOS 17.0, *) else {
+ return
+ }
+ try? session.configuration { _ in
+ for capture in devices.values {
+ try capture.setDynamicRangeMode(dynamicRangeMode)
+ }
+ }
+ }
+ }
+
+ private lazy var videoMixer = {
+ var videoMixer = VideoMixer<VideoCaptureUnit>()
+ videoMixer.delegate = self
+ return videoMixer
+ }()
+
+ #if os(tvOS)
+ private var _devices: [UInt8: Any] = [:]
+ @available(tvOS 17.0, *)
+ var devices: [UInt8: VideoDeviceUnit] {
+ get {
+ _devices as! [UInt8: VideoDeviceUnit]
+ }
+ set {
+ _devices = newValue
+ }
+ }
+ #elseif os(iOS) || os(macOS) || os(visionOS)
+ var devices: [UInt8: VideoDeviceUnit] = [:]
+ #endif
+
+ private let session: (any CaptureSessionConvertible)
+
+ init(_ session: (some CaptureSessionConvertible)) {
+ self.session = session
+ }
+
+ func append(_ track: UInt8, buffer: CMSampleBuffer) {
+ videoMixer.append(track, sampleBuffer: buffer)
+ }
+
+ @available(tvOS 17.0, *)
+ func attachVideo(_ track: UInt8, device: AVCaptureDevice?, configuration: VideoDeviceConfigurationBlock?) throws {
+ try session.configuration { _ in
+ session.detachCapture(devices[track])
+ videoMixer.reset(track)
+ devices[track] = nil
+ if let device {
+ if hasDevice && session.isMultiCamSessionEnabled == false {
+ throw Error.multiCamNotSupported
+ }
+ let capture = try VideoDeviceUnit(track, device: device)
+ try? capture.setDynamicRangeMode(dynamicRangeMode)
+ #if os(iOS) || os(macOS)
+ capture.videoOrientation = videoOrientation
+ #endif
+ capture.setSampleBufferDelegate(self)
+ try? configuration?(capture)
+ session.attachCapture(capture)
+ capture.apply()
+ devices[track] = capture
+ }
+ }
+ }
+
+ #if os(iOS) || os(tvOS) || os(macOS)
+ @available(tvOS 17.0, *)
+ func setTorchMode(_ torchMode: AVCaptureDevice.TorchMode) {
+ for capture in devices.values {
+ capture.setTorchMode(torchMode)
+ }
+ }
+ #endif
+
+ @available(tvOS 17.0, *)
+ func makeDataOutput(_ track: UInt8) -> VideoCaptureUnitDataOutput {
+ return .init(track: track, videoMixer: videoMixer)
+ }
+
+ @available(tvOS 17.0, *)
+ func suspend() {
+ guard !isSuspended else {
+ return
+ }
+ for capture in devices.values {
+ session.detachCapture(capture)
+ }
+ isSuspended = true
+ }
+
+ @available(tvOS 17.0, *)
+ func resume() {
+ guard isSuspended else {
+ return
+ }
+ for capture in devices.values {
+ session.attachCapture(capture)
+ }
+ isSuspended = false
+ }
+
+ func finish() {
+ _inputs.finish()
+ _output.finish()
+ }
+}
+
+extension VideoCaptureUnit: VideoMixerDelegate {
+ // MARK: VideoMixerDelegate
+ func videoMixer(_ videoMixer: VideoMixer, track: UInt8, didInput sampleBuffer: CMSampleBuffer) {
+ _inputs.yield((track, sampleBuffer))
+ }
+
+ func videoMixer(_ videoMixer: VideoMixer, didOutput sampleBuffer: CMSampleBuffer) {
+ _output.yield(sampleBuffer)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/VideoDeviceUnit.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/VideoDeviceUnit.swift
new file mode 100644
index 000000000..4288c3d41
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/VideoDeviceUnit.swift
@@ -0,0 +1,209 @@
+import AVFoundation
+import Foundation
+
+/// Configuration callback block for a VideoDeviceUnit.
+@available(tvOS 17.0, *)
+public typealias VideoDeviceConfigurationBlock = @Sendable (VideoDeviceUnit) throws -> Void
+
+/// An object that provides the interface to control the AVCaptureDevice's transport behavior.
+@available(tvOS 17.0, *)
+public final class VideoDeviceUnit: DeviceUnit {
+ /// The error domain codes.
+ public enum Error: Swift.Error {
+ /// The frameRate isn’t supported.
+ case unsupportedFrameRate
+ /// The dynamic range mode isn't supported.
+ case unsupportedDynamicRangeMode(_ mode: DynamicRangeMode)
+ }
+
+ /// The output type that this unit provides: a capture video data output.
+ public typealias Output = AVCaptureVideoDataOutput
+
+ /// The default color format.
+ public static let colorFormat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+
+ /// The device object.
+ public private(set) var device: AVCaptureDevice?
+
+ /// The frame rate for capturing video frame.
+ public private(set) var frameRate = MediaMixer.defaultFrameRate
+
+ /// Specifies the video capture color format.
+ public var colorFormat = VideoDeviceUnit.colorFormat
+
+ /// The track number.
+ public let track: UInt8
+ /// The input data to a capture session.
+ public private(set) var input: AVCaptureInput?
+ /// The output that vends sample buffers.
+ public private(set) var output: Output? {
+ didSet {
+ oldValue?.setSampleBufferDelegate(nil, queue: nil)
+ guard let output else {
+ return
+ }
+ output.alwaysDiscardsLateVideoFrames = true
+ }
+ }
+ /// The connection from a capture input to a capture output.
+ public private(set) var connection: AVCaptureConnection?
+
+ #if os(iOS) || os(macOS)
+ /// Specifies the videoOrientation that indicates whether to rotate the video flowing through the connection to a given orientation.
+ public var videoOrientation: AVCaptureVideoOrientation = .portrait {
+ didSet {
+ output?.connections.filter { $0.isVideoOrientationSupported }.forEach {
+ $0.videoOrientation = videoOrientation
+ }
+ }
+ }
+ #endif
+
+ #if os(iOS) || os(macOS) || os(tvOS)
+ /// Specifies whether the video flowing through the connection should be mirrored about its vertical axis.
+ public var isVideoMirrored = false {
+ didSet {
+ output?.connections.filter { $0.isVideoMirroringSupported }.forEach {
+ $0.isVideoMirrored = isVideoMirrored
+ }
+ }
+ }
+ #endif
+
+ #if os(iOS)
+ /// Specifies the preferredVideoStabilizationMode most appropriate for use with the connection.
+ public var preferredVideoStabilizationMode: AVCaptureVideoStabilizationMode = .off {
+ didSet {
+ output?.connections.filter { $0.isVideoStabilizationSupported }.forEach {
+ $0.preferredVideoStabilizationMode = preferredVideoStabilizationMode
+ }
+ }
+ }
+ #endif
+
+ private var dynamicRangeMode: DynamicRangeMode = .sdr
+ private var dataOutput: VideoCaptureUnitDataOutput?
+
+ init(_ track: UInt8, device: AVCaptureDevice) throws {
+ self.track = track
+ input = try AVCaptureDeviceInput(device: device)
+ self.output = AVCaptureVideoDataOutput()
+ self.device = device
+ #if os(iOS)
+ if let output, let port = input?.ports.first(where: { $0.mediaType == .video && $0.sourceDeviceType == device.deviceType && $0.sourceDevicePosition == device.position }) {
+ connection = AVCaptureConnection(inputPorts: [port], output: output)
+ } else {
+ connection = nil
+ }
+ #elseif os(tvOS) || os(macOS)
+ if let output, let port = input?.ports.first(where: { $0.mediaType == .video }) {
+ connection = AVCaptureConnection(inputPorts: [port], output: output)
+ } else {
+ connection = nil
+ }
+ #endif
+ }
+
+ /// Sets the frame rate of a device capture.
+ public func setFrameRate(_ frameRate: Float64) throws {
+ guard let device else {
+ return
+ }
+ try device.lockForConfiguration()
+ defer {
+ device.unlockForConfiguration()
+ }
+ if device.activeFormat.isFrameRateSupported(frameRate) {
+ device.activeVideoMinFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * frameRate))
+ device.activeVideoMaxFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * frameRate))
+ } else {
+ if let format = device.videoFormat(
+ width: device.activeFormat.formatDescription.dimensions.width,
+ height: device.activeFormat.formatDescription.dimensions.height,
+ frameRate: frameRate,
+ isMultiCamSupported: device.activeFormat.isMultiCamSupported
+ ) {
+ device.activeFormat = format
+ device.activeVideoMinFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * frameRate))
+ device.activeVideoMaxFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * frameRate))
+ } else {
+ throw Error.unsupportedFrameRate
+ }
+ }
+ self.frameRate = frameRate
+ }
+
+ func setDynamicRangeMode(_ dynamicRangeMode: DynamicRangeMode) throws {
+ guard let device, self.dynamicRangeMode != dynamicRangeMode else {
+ return
+ }
+ try device.lockForConfiguration()
+ defer {
+ device.unlockForConfiguration()
+ }
+ let activeFormat = device.activeFormat
+ if let format = device.formats.filter({ $0.formatDescription.dimensions.size == activeFormat.formatDescription.dimensions.size }).first(where: { $0.formatDescription.mediaSubType.rawValue == dynamicRangeMode.videoFormat }) {
+ device.activeFormat = format
+ self.dynamicRangeMode = dynamicRangeMode
+ } else {
+ throw Error.unsupportedDynamicRangeMode(dynamicRangeMode)
+ }
+ }
+
+ #if os(iOS) || os(tvOS) || os(macOS)
+ func setTorchMode(_ torchMode: AVCaptureDevice.TorchMode) {
+ guard let device, device.isTorchModeSupported(torchMode) else {
+ return
+ }
+ do {
+ try device.lockForConfiguration()
+ defer {
+ device.unlockForConfiguration()
+ }
+ device.torchMode = torchMode
+ } catch {
+ logger.error("while setting torch:", error)
+ }
+ }
+ #endif
+
+ func setSampleBufferDelegate(_ videoUnit: VideoCaptureUnit?) {
+ dataOutput = videoUnit?.makeDataOutput(track)
+ output?.setSampleBufferDelegate(dataOutput, queue: videoUnit?.lockQueue)
+ }
+
+ func apply() {
+ #if os(iOS) || os(tvOS) || os(macOS)
+ output?.connections.forEach {
+ if $0.isVideoMirroringSupported {
+ $0.isVideoMirrored = isVideoMirrored
+ }
+ #if os(iOS) || os(macOS)
+ if $0.isVideoOrientationSupported {
+ $0.videoOrientation = videoOrientation
+ }
+ #endif
+ #if os(iOS)
+ if $0.isVideoStabilizationSupported {
+ $0.preferredVideoStabilizationMode = preferredVideoStabilizationMode
+ }
+ #endif
+ }
+ #endif
+ }
+}
+
+@available(tvOS 17.0, *)
+final class VideoCaptureUnitDataOutput: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
+ private let track: UInt8
+ private let videoMixer: VideoMixer<VideoCaptureUnit>
+
+ init(track: UInt8, videoMixer: VideoMixer<VideoCaptureUnit>) {
+ self.track = track
+ self.videoMixer = videoMixer
+ }
+
+ func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
+ videoMixer.append(track, sampleBuffer: sampleBuffer)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/VideoMixer.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/VideoMixer.swift
new file mode 100644
index 000000000..3def985dc
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/VideoMixer.swift
@@ -0,0 +1,53 @@
+import CoreImage
+import CoreMedia
+import Foundation
+
+protocol VideoMixerDelegate: AnyObject {
+ func videoMixer(_ videoMixer: VideoMixer, track: UInt8, didInput sampleBuffer: CMSampleBuffer)
+ func videoMixer(_ videoMixer: VideoMixer, didOutput sampleBuffer: CMSampleBuffer)
+}
+
+private let kVideoMixer_lockFlags = CVPixelBufferLockFlags(rawValue: .zero)
+
+final class VideoMixer<T: VideoMixerDelegate> {
+ weak var delegate: T?
+ var settings: VideoMixerSettings = .default
+ private(set) var inputFormats: [UInt8: CMFormatDescription] = [:]
+ private var currentPixelBuffer: CVPixelBuffer?
+
+ func append(_ track: UInt8, sampleBuffer: CMSampleBuffer) {
+ inputFormats[track] = sampleBuffer.formatDescription
+ delegate?.videoMixer(self, track: track, didInput: sampleBuffer)
+ switch settings.mode {
+ case .offscreen:
+ break
+ case .passthrough:
+ if settings.mainTrack == track {
+ outputSampleBuffer(sampleBuffer)
+ }
+ }
+ }
+
+ func reset(_ track: UInt8) {
+ inputFormats[track] = nil
+ }
+
+ @inline(__always)
+ private func outputSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
+ defer {
+ currentPixelBuffer = sampleBuffer.imageBuffer
+ }
+ guard settings.isMuted else {
+ delegate?.videoMixer(self, didOutput: sampleBuffer)
+ return
+ }
+ do {
+ try sampleBuffer.imageBuffer?.mutate(kVideoMixer_lockFlags) { imageBuffer in
+ try imageBuffer.copy(currentPixelBuffer)
+ }
+ delegate?.videoMixer(self, didOutput: sampleBuffer)
+ } catch {
+ logger.warn(error)
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/VideoMixerSettings.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/VideoMixerSettings.swift
new file mode 100644
index 000000000..cdc08c98a
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/VideoMixerSettings.swift
@@ -0,0 +1,33 @@
+import Accelerate
+import CoreMedia
+import Foundation
+
+/// Constraints on the video mixer settings.
+public struct VideoMixerSettings: Codable, Sendable {
+ /// The default setting for the mixer.
+ public static let `default`: VideoMixerSettings = .init()
+
+ /// The type of image rendering mode.
+ public enum Mode: String, Codable, Sendable {
+ /// The input buffer will be used as it is. No effects will be applied.
+ case passthrough
+ /// Off-screen rendering will be performed to allow for more flexible drawing.
+ case offscreen
+ }
+
+ /// Specifies the image rendering mode.
+ public var mode: Mode
+
+ /// Specifies whether the video signal is frozen (muted) or not.
+ public var isMuted: Bool
+
+ /// Specifies the main track number.
+ public var mainTrack: UInt8
+
+ /// Creates a new instance.
+ public init(mode: Mode = .passthrough, isMuted: Bool = false, mainTrack: UInt8 = 0) {
+ self.mode = mode
+ self.isMuted = isMuted
+ self.mainTrack = mainTrack
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Network/NetworkConnection.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Network/NetworkConnection.swift
new file mode 100644
index 000000000..e1330c1a1
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Network/NetworkConnection.swift
@@ -0,0 +1,10 @@
+import Foundation
+
+/// The interface that serves as the foundation of the RTMPConnection.
+public protocol NetworkConnection: Actor {
+ /// Indicates whether the instance is connected to the server (true) or not (false).
+ var connected: Bool { get async }
+
+ /// Closes the connection to the server.
+ func close() async throws
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Network/NetworkMonitor.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Network/NetworkMonitor.swift
new file mode 100644
index 000000000..caa1ee0d9
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Network/NetworkMonitor.swift
@@ -0,0 +1,109 @@
+import Foundation
+
+/// An object that provides monitoring events for the RTMPConnection and SRTConnection.
+package final actor NetworkMonitor {
+ /// The error domain codes.
+ public enum Error: Swift.Error {
+ /// An invalid internal stare.
+ case invalidState
+ }
+
+ /// An asynchronous sequence of network monitoring events.
+ public var event: AsyncStream<NetworkMonitorEvent> {
+ AsyncStream { continuation in
+ self.continuation = continuation
+ }
+ }
+
+ public private(set) var isRunning = false
+ private var timer: Task<Void, any Swift.Error>? {
+ didSet {
+ oldValue?.cancel()
+ }
+ }
+ private var measureInterval = 3
+ private var currentBytesInPerSecond = 0
+ private var currentBytesOutPerSecond = 0
+ private var previousTotalBytesIn = 0
+ private var previousTotalBytesOut = 0
+ private var previousQueueBytesOut: [Int] = []
+ private var continuation: AsyncStream<NetworkMonitorEvent>.Continuation? {
+ didSet {
+ oldValue?.finish()
+ }
+ }
+ private weak var reporter: (any NetworkTransportReporter)?
+
+ /// Creates a new instance.
+ package init(_ reporter: some NetworkTransportReporter) {
+ self.reporter = reporter
+ }
+
+ private func collect() async throws -> NetworkMonitorEvent {
+ guard let report = await reporter?.makeNetworkTransportReport() else {
+ throw Error.invalidState
+ }
+ let totalBytesIn = report.totalBytesIn
+ let totalBytesOut = report.totalBytesOut
+ let queueBytesOut = report.queueBytesOut
+ currentBytesInPerSecond = totalBytesIn - previousTotalBytesIn
+ currentBytesOutPerSecond = totalBytesOut - previousTotalBytesOut
+ previousTotalBytesIn = totalBytesIn
+ previousTotalBytesOut = totalBytesOut
+ previousQueueBytesOut.append(queueBytesOut)
+ let eventReport = NetworkMonitorReport(
+ totalBytesIn: totalBytesIn,
+ totalBytesOut: totalBytesOut,
+ currentQueueBytesOut: queueBytesOut,
+ currentBytesInPerSecond: currentBytesInPerSecond,
+ currentBytesOutPerSecond: currentBytesOutPerSecond
+ )
+ if measureInterval <= previousQueueBytesOut.count {
+ defer {
+ previousQueueBytesOut.removeFirst()
+ }
+ var total = 0
+ for i in 0..<previousQueueBytesOut.count - 1 where previousQueueBytesOut[i] < previousQueueBytesOut[i + 1] {
+ total += 1
+ }
+ if total == measureInterval - 1 {
+ return .publishInsufficientBWOccured(report: eventReport)
+ }
+ }
+ return .status(report: eventReport)
+ }
+}
+
+extension NetworkMonitor: AsyncRunner {
+ // MARK: AsyncRunner
+ package func startRunning() {
+ guard !isRunning else {
+ return
+ }
+ isRunning = true
+ timer = Task {
+ while isRunning {
+ try await Task.sleep(nanoseconds: 1_000_000_000)
+ if let event = try? await collect() {
+ continuation?.yield(event)
+ }
+ }
+ }
+ }
+
+ package func stopRunning() {
+ guard isRunning else {
+ return
+ }
+ timer = nil
+ continuation = nil
+ isRunning = false
+ }
+}
+
+/// The network monitor event.
+package enum NetworkMonitorEvent: Sendable {
+ /// The current statistics of the monitor.
+ case status(report: NetworkMonitorReport)
+ /// An insufficient-bandwidth condition occurred while publishing.
+ case publishInsufficientBWOccured(report: NetworkMonitorReport)
+}
+
+/// The statistics of the network monitor.
+package struct NetworkMonitorReport: Sendable {
+ package let totalBytesIn: Int
+ package let totalBytesOut: Int
+ package let currentQueueBytesOut: Int
+ package let currentBytesInPerSecond: Int
+ package let currentBytesOutPerSecond: Int
+}
+
+/// A type that provides a network transport report.
+package protocol NetworkTransportReporter: Actor {
+ /// Makes a network monitor.
+ func makeNetworkMonitor() async -> NetworkMonitor
+ /// Makes a network transport report.
+ func makeNetworkTransportReport() async -> NetworkTransportReport
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/AssetScreenObject.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/AssetScreenObject.swift
new file mode 100644
index 000000000..0436cc3f8
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/AssetScreenObject.swift
@@ -0,0 +1,118 @@
+import AVFoundation
+import CoreImage
+
+#if !os(visionOS)
+/// An object that manages offscreen rendering of an asset resource.
+public final class AssetScreenObject: ScreenObject, ChromaKeyProcessable {
+ public var chromaKeyColor: CGColor?
+
+ /// Indicates whether the asset is currently being read.
+ public var isReading: Bool {
+ return reader?.status == .reading
+ }
+
+ /// Specifies how the video is displayed within the object’s bounds.
+ public var videoGravity: AVLayerVideoGravity = .resizeAspect {
+ didSet {
+ guard videoGravity != oldValue else {
+ return
+ }
+ invalidateLayout()
+ }
+ }
+
+ private var reader: AVAssetReader? {
+ didSet {
+ if let oldValue, oldValue.status == .reading {
+ oldValue.cancelReading()
+ }
+ }
+ }
+
+ private var sampleBuffer: CMSampleBuffer? {
+ didSet {
+ guard sampleBuffer != oldValue else {
+ return
+ }
+ if sampleBuffer == nil {
+ cancelReading()
+ return
+ }
+ invalidateLayout()
+ }
+ }
+
+ private var startedAt: CMTime = .zero
+ private var videoTrackOutput: AVAssetReaderTrackOutput?
+ private var outputSettings = [
+ kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_32BGRA
+ ] as [String: Any]
+
+ /// Prepares the asset reader to start reading.
+ public func startReading(_ asset: AVAsset) throws {
+ reader = try AVAssetReader(asset: asset)
+ guard let reader else {
+ return
+ }
+ let videoTrack = asset.tracks(withMediaType: .video).first
+ if let videoTrack {
+ let videoTrackOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: outputSettings)
+ videoTrackOutput.alwaysCopiesSampleData = false
+ reader.add(videoTrackOutput)
+ self.videoTrackOutput = videoTrackOutput
+ }
+ startedAt = CMClock.hostTimeClock.time
+ reader.startReading()
+ sampleBuffer = videoTrackOutput?.copyNextSampleBuffer()
+ }
+
+ /// Cancels and stops the reader's output.
+ public func cancelReading() {
+ reader = nil
+ sampleBuffer = nil
+ videoTrackOutput = nil
+ }
+
+ override public func makeBounds(_ size: CGSize) -> CGRect {
+ guard parent != nil, let image = sampleBuffer?.formatDescription?.dimensions.size else {
+ return super.makeBounds(size)
+ }
+ let bounds = super.makeBounds(size)
+ switch videoGravity {
+ case .resizeAspect:
+ let scale = min(bounds.size.width / image.width, bounds.size.height / image.height)
+ let scaleSize = CGSize(width: image.width * scale, height: image.height * scale)
+ return super.makeBounds(scaleSize)
+ case .resizeAspectFill:
+ return bounds
+ default:
+ return bounds
+ }
+ }
+
+ override public func makeImage(_ renderer: some ScreenRenderer) -> CGImage? {
+ guard let image: CIImage = makeImage(renderer) else {
+ return nil
+ }
+ return renderer.context.createCGImage(image, from: videoGravity.region(bounds, image: image.extent))
+ }
+
+ override public func makeImage(_ renderer: some ScreenRenderer) -> CIImage? {
+ guard let sampleBuffer, let pixelBuffer = sampleBuffer.imageBuffer else {
+ return nil
+ }
+ return CIImage(cvPixelBuffer: pixelBuffer).transformed(by: videoGravity.scale(
+ bounds.size,
+ image: pixelBuffer.size
+ ))
+ }
+
+ override func draw(_ renderer: some ScreenRenderer) {
+ super.draw(renderer)
+ let duration = CMClock.hostTimeClock.time - startedAt
+ if let sampleBuffer, sampleBuffer.presentationTimeStamp <= duration {
+ self.sampleBuffer = videoTrackOutput?.copyNextSampleBuffer()
+ }
+ }
+}
+#endif
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ChromaKeyProcessor.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ChromaKeyProcessor.swift
new file mode 100644
index 000000000..1838f4d60
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ChromaKeyProcessor.swift
@@ -0,0 +1,178 @@
+import Accelerate
+import Foundation
+import simd
+
+/// A marker protocol for screen objects that support chroma key processing.
+@ScreenActor
+public protocol ChromaKeyProcessable {
+ /// Specifies the chroma key color.
+ var chromaKeyColor: CGColor? { get set }
+}
+
+final class ChromaKeyProcessor {
+ static let noFlags = vImage_Flags(kvImageNoFlags)
+ static let labColorSpace = CGColorSpace(name: CGColorSpace.genericLab)!
+
+ enum Error: Swift.Error {
+ case invalidState
+ }
+
+ private let entriesPerChannel = 32
+ private let sourceChannelCount = 3
+ private let destinationChannelCount = 1
+
+ private let srcFormat = vImage_CGImageFormat(
+ bitsPerComponent: 32,
+ bitsPerPixel: 32 * 3,
+ colorSpace: CGColorSpaceCreateDeviceRGB(),
+ bitmapInfo: CGBitmapInfo(rawValue: kCGBitmapByteOrder32Host.rawValue | CGBitmapInfo.floatComponents.rawValue | CGImageAlphaInfo.none.rawValue))
+
+ private let destFormat = vImage_CGImageFormat(
+ bitsPerComponent: 32,
+ bitsPerPixel: 32 * 3,
+ colorSpace: labColorSpace,
+ bitmapInfo: CGBitmapInfo(rawValue: kCGBitmapByteOrder32Host.rawValue | CGBitmapInfo.floatComponents.rawValue | CGImageAlphaInfo.none.rawValue))
+
+ private var tables: [CGColor: vImage_MultidimensionalTable] = [:]
+ private var outputF: [String: vImage_Buffer] = [:]
+ private var output8: [String: vImage_Buffer] = [:]
+ private var buffers: [String: [vImage_Buffer]] = [:]
+ private let converter: vImageConverter
+ private var maxFloats: [Float] = [1.0, 1.0, 1.0, 1.0]
+ private var minFloats: [Float] = [0.0, 0.0, 0.0, 0.0]
+
+ init() throws {
+ guard let srcFormat, let destFormat else {
+ throw Error.invalidState
+ }
+ converter = try vImageConverter.make(sourceFormat: srcFormat, destinationFormat: destFormat)
+ }
+
+ deinit {
+ tables.forEach { vImageMultidimensionalTable_Release($0.value) }
+ output8.forEach { $0.value.free() }
+ outputF.forEach { $0.value.free() }
+ buffers.forEach { $0.value.forEach { $0.free() } }
+ }
+
+ func makeMask(_ source: inout vImage_Buffer, chromeKeyColor: CGColor) throws -> vImage_Buffer {
+ let key = "\(source.width):\(source.height)"
+ if tables[chromeKeyColor] == nil {
+ tables[chromeKeyColor] = try makeLookUpTable(chromeKeyColor, tolerance: 60)
+ }
+ if outputF[key] == nil {
+ outputF[key] = try vImage_Buffer(width: Int(source.width), height: Int(source.height), bitsPerPixel: 32)
+ }
+ if output8[key] == nil {
+ output8[key] = try vImage_Buffer(width: Int(source.width), height: Int(source.height), bitsPerPixel: 8)
+ }
+ guard
+ let table = tables[chromeKeyColor],
+ let dest = outputF[key] else {
+ throw Error.invalidState
+ }
+ var dests: [vImage_Buffer] = [dest]
+ let srcs = try makePlanarFBuffers(&source)
+ vImageMultiDimensionalInterpolatedLookupTable_PlanarF(
+ srcs,
+ &dests,
+ nil,
+ table,
+ kvImageFullInterpolation,
+ vImage_Flags(kvImageNoFlags)
+ )
+ guard var result = output8[key] else {
+ throw Error.invalidState
+ }
+ vImageConvert_PlanarFtoPlanar8(&dests[0], &result, 1.0, 0.0, Self.noFlags)
+ return result
+ }
+
+ private func makePlanarFBuffers(_ source: inout vImage_Buffer) throws -> [vImage_Buffer] {
+ let key = "\(source.width):\(source.height)"
+ if buffers[key] == nil {
+ buffers[key] = [
+ try vImage_Buffer(width: Int(source.width), height: Int(source.height), bitsPerPixel: 32),
+ try vImage_Buffer(width: Int(source.width), height: Int(source.height), bitsPerPixel: 32),
+ try vImage_Buffer(width: Int(source.width), height: Int(source.height), bitsPerPixel: 32),
+ try vImage_Buffer(width: Int(source.width), height: Int(source.height), bitsPerPixel: 32)
+ ]
+ }
+ guard var buffers = buffers[key] else {
+ throw Error.invalidState
+ }
+ vImageConvert_ARGB8888toPlanarF(
+ &source,
+ &buffers[0],
+ &buffers[1],
+ &buffers[2],
+ &buffers[3],
+ &maxFloats,
+ &minFloats,
+ Self.noFlags)
+ return [
+ buffers[1],
+ buffers[2],
+ buffers[3]
+ ]
+ }
+
+ private func makeLookUpTable(_ chromaKeyColor: CGColor, tolerance: Float) throws -> vImage_MultidimensionalTable? {
+ let ramp = vDSP.ramp(in: 0 ... 1.0, count: Int(entriesPerChannel))
+ let lookupTableElementCount = Int(pow(Float(entriesPerChannel), Float(sourceChannelCount))) * Int(destinationChannelCount)
+ var lookupTableData = [UInt16].init(repeating: 0, count: lookupTableElementCount)
+ let chromaKeyRGB = chromaKeyColor.components ?? [0, 0, 0]
+ let chromaKeyLab = try rgbToLab(
+ r: chromaKeyRGB[0],
+ g: chromaKeyRGB.count > 1 ? chromaKeyRGB[1] : chromaKeyRGB[0],
+ b: chromaKeyRGB.count > 2 ? chromaKeyRGB[2] : chromaKeyRGB[0]
+ )
+ var bufferIndex = 0
+ for red in ramp {
+ for green in ramp {
+ for blue in ramp {
+ let lab = try rgbToLab(r: red, g: green, b: blue)
+ let distance = simd_distance(chromaKeyLab, lab)
+ let contrast = Float(20)
+ let offset = Float(0.25)
+ let alpha = saturate(tanh(((distance / tolerance ) - 0.5 - offset) * contrast))
+ lookupTableData[bufferIndex] = UInt16(alpha * Float(UInt16.max))
+ bufferIndex += 1
+ }
+ }
+ }
+ var entryCountPerSourceChannel = [UInt8](repeating: UInt8(entriesPerChannel), count: sourceChannelCount)
+ let result = vImageMultidimensionalTable_Create(
+ &lookupTableData,
+ 3,
+ 1,
+ &entryCountPerSourceChannel,
+ kvImageMDTableHint_Float,
+ vImage_Flags(kvImageNoFlags),
+ nil)
+ vImageMultidimensionalTable_Retain(result)
+ return result
+ }
+
+ private func rgbToLab(r: CGFloat, g: CGFloat, b: CGFloat) throws -> SIMD3<Float> {
+ var data: [Float] = [Float(r), Float(g), Float(b)]
+ var srcPixelBuffer = data.withUnsafeMutableBufferPointer { pointer in
+ vImage_Buffer(data: pointer.baseAddress, height: 1, width: 1, rowBytes: 4 * 3)
+ }
+ var destPixelBuffer = try vImage_Buffer(width: 1, height: 1, bitsPerPixel: 32 * 3)
+ vImageConvert_AnyToAny(converter, &srcPixelBuffer, &destPixelBuffer, nil, vImage_Flags(kvImageNoFlags))
+ defer {
+ destPixelBuffer.free()
+ }
+ let result = destPixelBuffer.data.assumingMemoryBound(to: Float.self)
+ return .init(
+ result[0],
+ result[1],
+ result[2]
+ )
+ }
+
+ private func saturate<T: FloatingPoint>(_ x: T) -> T {
+ return min(max(0, x), 1)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/DisplayLinkChoreographer.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/DisplayLinkChoreographer.swift
new file mode 100644
index 000000000..d3fe32ba4
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/DisplayLinkChoreographer.swift
@@ -0,0 +1,148 @@
+import Foundation
+
+#if os(macOS)
+import CoreVideo
+
+// swiftlint:disable attributes
+// CVDisplayLink is deprecated on macOS; I've given up on making this conform to Sendable.
+final class DisplayLink: NSObject, @unchecked Sendable {
+ private static let preferredFramesPerSecond = 0
+
+ var isPaused = false {
+ didSet {
+ guard let displayLink, oldValue != isPaused else {
+ return
+ }
+ if isPaused {
+ CVDisplayLinkStop(displayLink)
+ } else {
+ CVDisplayLinkStart(displayLink)
+ }
+ }
+ }
+ var preferredFramesPerSecond = DisplayLink.preferredFramesPerSecond {
+ didSet {
+ guard preferredFramesPerSecond != oldValue else {
+ return
+ }
+ frameInterval = 1.0 / Double(preferredFramesPerSecond)
+ }
+ }
+ private(set) var duration = 0.0
+ private(set) var timestamp: CFTimeInterval = 0
+ private(set) var targetTimestamp: CFTimeInterval = 0
+ private var selector: Selector?
+ private var displayLink: CVDisplayLink?
+ private var frameInterval = 0.0
+ private weak var delegate: NSObject?
+
+ deinit {
+ selector = nil
+ }
+
+ init(target: NSObject, selector sel: Selector) {
+ super.init()
+ CVDisplayLinkCreateWithActiveCGDisplays(&displayLink)
+ guard let displayLink = displayLink else {
+ return
+ }
+ self.delegate = target
+ self.selector = sel
+ CVDisplayLinkSetOutputHandler(displayLink) { [weak self] _, inNow, _, _, _ -> CVReturn in
+ guard let self else {
+ return kCVReturnSuccess
+ }
+ if frameInterval == 0 || frameInterval <= inNow.pointee.timestamp - self.timestamp {
+ self.timestamp = Double(inNow.pointee.timestamp)
+ self.targetTimestamp = self.timestamp + frameInterval
+ _ = self.delegate?.perform(self.selector, with: self)
+ }
+ return kCVReturnSuccess
+ }
+ }
+
+ func add(to runloop: RunLoop, forMode mode: RunLoop.Mode) {
+ guard let displayLink, !isPaused else {
+ return
+ }
+ CVDisplayLinkStart(displayLink)
+ }
+
+ func invalidate() {
+ // Stop the link if it is still running.
+ guard let displayLink, !isPaused else {
+ return
+ }
+ CVDisplayLinkStop(displayLink)
+ }
+}
+
+extension CVTimeStamp {
+ @inlinable @inline(__always)
+ var timestamp: Double {
+ Double(self.hostTime) / Double(self.videoTimeScale)
+ }
+}
+
+// swiftlint:enable attributes
+
+#else
+import QuartzCore
+typealias DisplayLink = CADisplayLink
+#endif
+
+struct DisplayLinkTime {
+ let timestamp: TimeInterval
+ let targetTimestamp: TimeInterval
+}
+
+final class DisplayLinkChoreographer: NSObject {
+ private static let preferredFramesPerSecond = 0
+
+ var updateFrames: AsyncStream<DisplayLinkTime> {
+ AsyncStream { continuation in
+ self.continutation = continuation
+ }
+ }
+ var preferredFramesPerSecond = DisplayLinkChoreographer.preferredFramesPerSecond {
+ didSet {
+ guard preferredFramesPerSecond != oldValue else {
+ return
+ }
+ displayLink?.preferredFramesPerSecond = preferredFramesPerSecond
+ }
+ }
+ private(set) var isRunning = false
+ private var displayLink: DisplayLink? {
+ didSet {
+ oldValue?.invalidate()
+ displayLink?.preferredFramesPerSecond = preferredFramesPerSecond
+ displayLink?.isPaused = false
+ displayLink?.add(to: .main, forMode: .common)
+ }
+ }
+ private var continutation: AsyncStream<DisplayLinkTime>.Continuation?
+
+ @objc
+ private func update(displayLink: DisplayLink) {
+ continutation?.yield(.init(timestamp: displayLink.timestamp, targetTimestamp: displayLink.targetTimestamp))
+ }
+}
+
+extension DisplayLinkChoreographer: Runner {
+ func startRunning() {
+ guard !isRunning else {
+ return
+ }
+ displayLink = DisplayLink(target: self, selector: #selector(self.update(displayLink:)))
+ isRunning = true
+ }
+
+ func stopRunning() {
+ guard isRunning else {
+ return
+ }
+ isRunning = false
+ displayLink = nil
+ continutation?.finish()
+ }
+}
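+
+// A minimal consumption sketch (illustrative, not part of the vendored source):
+// the choreographer publishes DisplayLinkTime values that a render loop can await.
+//
+//     let choreographer = DisplayLinkChoreographer()
+//     choreographer.preferredFramesPerSecond = 30
+//     choreographer.startRunning()
+//     Task {
+//         for await time in choreographer.updateFrames {
+//             // Drive per-frame work from time.timestamp and time.targetTimestamp.
+//         }
+//     }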
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ImageScreenObject.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ImageScreenObject.swift
new file mode 100644
index 000000000..445a5008d
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ImageScreenObject.swift
@@ -0,0 +1,59 @@
+import CoreImage
+
+/// An object that manages offscreen rendering of a CGImage source.
+public final class ImageScreenObject: ScreenObject {
+ /// Specifies the image.
+ public var cgImage: CGImage? {
+ didSet {
+ guard cgImage != oldValue else {
+ return
+ }
+ invalidateLayout()
+ }
+ }
+
+ override public func makeImage(_ renderer: some ScreenRenderer) -> CGImage? {
+ let intersection = bounds.intersection(renderer.bounds)
+
+ guard bounds != intersection else {
+ return cgImage
+ }
+
+ // Handling when the drawing area is exceeded.
+ let x: CGFloat
+ switch horizontalAlignment {
+ case .left:
+ x = bounds.origin.x
+ case .center:
+ x = bounds.origin.x / 2
+ case .right:
+ x = 0.0
+ }
+
+ let y: CGFloat
+ switch verticalAlignment {
+ case .top:
+ y = 0.0
+ case .middle:
+ y = abs(bounds.origin.y) / 2
+ case .bottom:
+ y = abs(bounds.origin.y)
+ }
+
+ return cgImage?.cropping(to: .init(origin: .init(x: x, y: y), size: intersection.size))
+ }
+
+ override public func makeImage(_ renderer: some ScreenRenderer) -> CIImage? {
+ guard let image: CGImage = makeImage(renderer) else {
+ return nil
+ }
+ return CIImage(cgImage: image)
+ }
+
+ override public func makeBounds(_ size: CGSize) -> CGRect {
+ guard let cgImage else {
+ return super.makeBounds(size)
+ }
+ return super.makeBounds(size == .zero ? cgImage.size : size)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/RoundedRectangleFactory.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/RoundedRectangleFactory.swift
new file mode 100644
index 000000000..303cbeced
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/RoundedRectangleFactory.swift
@@ -0,0 +1,25 @@
+import CoreImage
+import CoreImage.CIFilterBuiltins
+
+final class RoundedRectangleFactory {
+ private var imageBuffers: [String: CIImage] = [:]
+
+ func cornerRadius(_ size: CGSize, cornerRadius: CGFloat) -> CIImage? {
+ let key = "\(size.width):\(size.height):\(cornerRadius)"
+ if let buffer = imageBuffers[key] {
+ return buffer
+ }
+ let roundedRect = CIFilter.roundedRectangleGenerator()
+ roundedRect.extent = .init(origin: .zero, size: size)
+ roundedRect.radius = Float(cornerRadius)
+ guard
+ let image = roundedRect.outputImage else {
+ return nil
+ }
+ imageBuffers[key] = image
+ return imageBuffers[key]
+ }
+
+ func removeAll() {
+ imageBuffers.removeAll()
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/Screen.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/Screen.swift
new file mode 100644
index 000000000..90a7d04b7
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/Screen.swift
@@ -0,0 +1,236 @@
+import AVFoundation
+import Foundation
+
+#if canImport(AppKit)
+import AppKit
+#endif
+
+#if canImport(UIKit)
+import UIKit
+#endif
+
+/// An interface a screen uses to inform its delegate.
+public protocol ScreenDelegate: AnyObject {
+    /// Tells the receiver that the screen object layout phase is about to run.
+ func screen(_ screen: Screen, willLayout time: CMTime)
+}
+
+/// An object that manages the foundation of offscreen rendering.
+public final class Screen: ScreenObjectContainerConvertible {
+ /// The default screen size.
+ public static let size = CGSize(width: 1280, height: 720)
+
+ private static let lockFlags = CVPixelBufferLockFlags(rawValue: 0)
+ private static let preferredTimescale: CMTimeScale = 1000000000
+
+    /// The total number of children.
+ public var childCounts: Int {
+ return root.childCounts
+ }
+
+ /// Specifies the delegate object.
+ public weak var delegate: (any ScreenDelegate)?
+
+    /// Specifies the video size to use when outputting video.
+ public var size: CGSize = Screen.size {
+ didSet {
+ guard size != oldValue else {
+ return
+ }
+ renderer.bounds = .init(origin: .zero, size: size)
+ CVPixelBufferPoolCreate(nil, nil, dynamicRangeMode.makePixelBufferAttributes(size), &pixelBufferPool)
+ }
+ }
+
+    /// Specifies whether GPU rendering is enabled.
+ @available(*, deprecated)
+ public var isGPURendererEnabled = false {
+ didSet {
+ guard isGPURendererEnabled != oldValue else {
+ return
+ }
+ if isGPURendererEnabled {
+ renderer = ScreenRendererByGPU(dynamicRangeMode: dynamicRangeMode)
+ } else {
+ renderer = ScreenRendererByCPU(dynamicRangeMode: dynamicRangeMode)
+ }
+ }
+ }
+
+ #if os(macOS)
+ /// Specifies the background color.
+ public var backgroundColor: CGColor = NSColor.black.cgColor {
+ didSet {
+ guard backgroundColor != oldValue else {
+ return
+ }
+ renderer.backgroundColor = backgroundColor
+ }
+ }
+ #else
+ /// Specifies the background color.
+ public var backgroundColor: CGColor = UIColor.black.cgColor {
+ didSet {
+ guard backgroundColor != oldValue else {
+ return
+ }
+ renderer.backgroundColor = backgroundColor
+ }
+ }
+ #endif
+
+ var synchronizationClock: CMClock? {
+ get {
+ return renderer.synchronizationClock
+ }
+ set {
+ renderer.synchronizationClock = newValue
+ }
+ }
+ var dynamicRangeMode: DynamicRangeMode = .sdr {
+ didSet {
+ guard dynamicRangeMode != oldValue else {
+ return
+ }
+ if isGPURendererEnabled {
+ renderer = ScreenRendererByGPU(dynamicRangeMode: dynamicRangeMode)
+ } else {
+ renderer = ScreenRendererByCPU(dynamicRangeMode: dynamicRangeMode)
+ }
+ CVPixelBufferPoolCreate(nil, nil, dynamicRangeMode.makePixelBufferAttributes(size), &pixelBufferPool)
+ }
+ }
+ private(set) var renderer: (any ScreenRenderer) = ScreenRendererByCPU(dynamicRangeMode: .sdr) {
+ didSet {
+ renderer.bounds = oldValue.bounds
+ renderer.backgroundColor = oldValue.backgroundColor
+ renderer.synchronizationClock = oldValue.synchronizationClock
+ }
+ }
+ private(set) var targetTimestamp: TimeInterval = 0.0
+ private(set) var videoTrackScreenObject = VideoTrackScreenObject()
+ private var videoCaptureLatency: TimeInterval = 0.0
+ private var root: ScreenObjectContainer = .init()
+ private var outputFormat: CMFormatDescription?
+ private var pixelBufferPool: CVPixelBufferPool? {
+ didSet {
+ outputFormat = nil
+ }
+ }
+ private var presentationTimeStamp: CMTime = .zero
+
+ /// Creates a screen object.
+ public init() {
+ try? addChild(videoTrackScreenObject)
+ CVPixelBufferPoolCreate(nil, nil, dynamicRangeMode.makePixelBufferAttributes(size), &pixelBufferPool)
+ }
+
+ /// Adds the specified screen object as a child of the current screen object container.
+ public func addChild(_ child: ScreenObject?) throws {
+ try root.addChild(child)
+ }
+
+ /// Removes the specified screen object as a child of the current screen object container.
+ public func removeChild(_ child: ScreenObject?) {
+ root.removeChild(child)
+ }
+
+ /// Registers a video effect.
+ public func registerVideoEffect(_ effect: some VideoEffect) -> Bool {
+ return videoTrackScreenObject.registerVideoEffect(effect)
+ }
+
+ /// Unregisters a video effect.
+ public func unregisterVideoEffect(_ effect: some VideoEffect) -> Bool {
+ return videoTrackScreenObject.unregisterVideoEffect(effect)
+ }
+
+ func append(_ track: UInt8, buffer: CMSampleBuffer) {
+ let screens: [VideoTrackScreenObject] = root.getScreenObjects()
+ for screen in screens where screen.track == track {
+ screen.enqueue(buffer)
+ }
+ }
+
+ func makeSampleBuffer(_ updateFrame: DisplayLinkTime) -> CMSampleBuffer? {
+ defer {
+ targetTimestamp = updateFrame.targetTimestamp
+ }
+ var pixelBuffer: CVPixelBuffer?
+ pixelBufferPool?.createPixelBuffer(&pixelBuffer)
+ guard let pixelBuffer else {
+ return nil
+ }
+ if outputFormat == nil {
+ CMVideoFormatDescriptionCreateForImageBuffer(
+ allocator: kCFAllocatorDefault,
+ imageBuffer: pixelBuffer,
+ formatDescriptionOut: &outputFormat
+ )
+ }
+ guard let outputFormat else {
+ return nil
+ }
+ if let dictionary = CVBufferCopyAttachments(pixelBuffer, .shouldNotPropagate) {
+ CVBufferSetAttachments(pixelBuffer, dictionary, .shouldPropagate)
+ }
+ let presentationTimeStamp = CMTime(seconds: updateFrame.timestamp - videoCaptureLatency, preferredTimescale: Self.preferredTimescale)
+ guard self.presentationTimeStamp <= presentationTimeStamp else {
+ return nil
+ }
+ self.presentationTimeStamp = presentationTimeStamp
+ var timingInfo = CMSampleTimingInfo(
+ duration: CMTime(seconds: updateFrame.targetTimestamp - updateFrame.timestamp, preferredTimescale: Self.preferredTimescale),
+ presentationTimeStamp: presentationTimeStamp,
+ decodeTimeStamp: .invalid
+ )
+ var sampleBuffer: CMSampleBuffer?
+ guard CMSampleBufferCreateReadyWithImageBuffer(
+ allocator: kCFAllocatorDefault,
+ imageBuffer: pixelBuffer,
+ formatDescription: outputFormat,
+ sampleTiming: &timingInfo,
+ sampleBufferOut: &sampleBuffer
+ ) == noErr else {
+ return nil
+ }
+ if let sampleBuffer {
+ return render(sampleBuffer)
+ } else {
+ return nil
+ }
+ }
+
+ func render(_ sampleBuffer: CMSampleBuffer) -> CMSampleBuffer {
+ try? sampleBuffer.imageBuffer?.lockBaseAddress(Self.lockFlags)
+ defer {
+ try? sampleBuffer.imageBuffer?.unlockBaseAddress(Self.lockFlags)
+ }
+ renderer.presentationTimeStamp = sampleBuffer.presentationTimeStamp
+ renderer.setTarget(sampleBuffer.imageBuffer)
+ if let dimensions = sampleBuffer.formatDescription?.dimensions {
+ root.size = dimensions.size
+ }
+ delegate?.screen(self, willLayout: sampleBuffer.presentationTimeStamp)
+ root.layout(renderer)
+ root.draw(renderer)
+ renderer.render()
+ return sampleBuffer
+ }
+
+ func setVideoCaptureLatency(_ presentationTimeStamp: CMTime) {
+ guard 0 < targetTimestamp else {
+ return
+ }
+ let hostPresentationTimeStamp = presentationTimeStamp.convertTime(from: synchronizationClock)
+ let diff = ceil((targetTimestamp - hostPresentationTimeStamp.seconds) * 10000) / 10000
+ videoCaptureLatency = diff
+ }
+
+ func reset() {
+ let screens: [VideoTrackScreenObject] = root.getScreenObjects()
+ for screen in screens {
+ screen.reset()
+ }
+ }
+}
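+
+// A minimal composition sketch (illustrative, not part of the vendored source);
+// `someCGImage` is a hypothetical CGImage supplied by the caller.
+//
+//     Task { @ScreenActor in
+//         let screen = Screen()
+//         screen.size = .init(width: 1920, height: 1080)
+//         let image = ImageScreenObject()
+//         image.cgImage = someCGImage
+//         try? screen.addChild(image)
+//     }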
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenActor.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenActor.swift
new file mode 100644
index 000000000..d35dc4754
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenActor.swift
@@ -0,0 +1,11 @@
+import Foundation
+
+/// A singleton actor whose executor performs screen object rendering.
+@globalActor
+public actor ScreenActor {
+ /// The shared actor instance.
+ public static let shared = ScreenActor()
+
+ private init() {
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenObject.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenObject.swift
new file mode 100644
index 000000000..ec8b8e97c
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenObject.swift
@@ -0,0 +1,162 @@
+import Accelerate
+import AVFoundation
+import CoreImage
+import CoreMedia
+import Foundation
+import VideoToolbox
+
+#if canImport(AppKit)
+import AppKit
+#endif
+
+#if canImport(UIKit)
+import UIKit
+#endif
+
+/// The ScreenObject class is the abstract class for all objects that are rendered on the screen.
+@ScreenActor
+open class ScreenObject {
+ /// The horizontal alignment for the screen object.
+ public enum HorizontalAlignment {
+ /// A guide that marks the left edge of the screen object.
+ case left
+        /// A guide that marks the horizontal center of the screen object.
+ case center
+ /// A guide that marks the right edge of the screen object.
+ case right
+ }
+
+ /// The vertical alignment for the screen object.
+ public enum VerticalAlignment {
+ /// A guide that marks the top edge of the screen object.
+ case top
+ /// A guide that marks the vertical middle of the screen object.
+ case middle
+ /// A guide that marks the bottom edge of the screen object.
+ case bottom
+ }
+
+ enum BlendMode {
+ case normal
+ case alpha
+ }
+
+    /// The screen object container that contains this screen object.
+ public internal(set) weak var parent: ScreenObjectContainer?
+
+ /// Specifies the size rectangle.
+ public var size: CGSize = .zero {
+ didSet {
+ guard size != oldValue else {
+ return
+ }
+ shouldInvalidateLayout = true
+ }
+ }
+
+ /// The bounds rectangle.
+ public internal(set) var bounds: CGRect = .zero
+
+ /// Specifies the visibility of the object.
+ public var isVisible = true
+
+ #if os(macOS)
+    /// Specifies the default spacing for laying out content in the screen object.
+ public var layoutMargin: NSEdgeInsets = .init(top: 0, left: 0, bottom: 0, right: 0)
+ #else
+    /// Specifies the default spacing for laying out content in the screen object.
+ public var layoutMargin: UIEdgeInsets = .init(top: 0, left: 0, bottom: 0, right: 0)
+ #endif
+
+ /// Specifies the radius to use when drawing rounded corners.
+ public var cornerRadius: CGFloat = 0.0
+
+ /// Specifies the alignment position along the vertical axis.
+ public var verticalAlignment: VerticalAlignment = .top
+
+ /// Specifies the alignment position along the horizontal axis.
+ public var horizontalAlignment: HorizontalAlignment = .left
+
+ var blendMode: BlendMode {
+ .alpha
+ }
+
+ var shouldInvalidateLayout = true
+
+ /// Creates a screen object.
+ public init() {
+ }
+
+ /// Invalidates the current layout and triggers a layout update.
+ public func invalidateLayout() {
+ shouldInvalidateLayout = true
+ }
+
+    /// Makes a CGImage for the offscreen image.
+ @available(*, deprecated, message: "It will be removed in the next major update. Please migrate to using CIImage instead.")
+ open func makeImage(_ renderer: some ScreenRenderer) -> CGImage? {
+ return nil
+ }
+
+    /// Makes a CIImage for the offscreen image.
+ open func makeImage(_ renderer: some ScreenRenderer) -> CIImage? {
+ return nil
+ }
+
+    /// Makes the screen object bounds for the offscreen image.
+ open func makeBounds(_ size: CGSize) -> CGRect {
+ guard let parent else {
+ return .init(origin: .zero, size: self.size)
+ }
+
+ let width = size.width == 0 ? max(parent.bounds.width - layoutMargin.left - layoutMargin.right + size.width, 0) : size.width
+ let height = size.height == 0 ? max(parent.bounds.height - layoutMargin.top - layoutMargin.bottom + size.height, 0) : size.height
+
+ let parentX = parent.bounds.origin.x
+ let parentWidth = parent.bounds.width
+ let x: CGFloat
+ switch horizontalAlignment {
+ case .center:
+ x = parentX + (parentWidth - width) / 2
+ case .left:
+ x = parentX + layoutMargin.left
+ case .right:
+ x = parentX + (parentWidth - width) - layoutMargin.right
+ }
+
+ let parentY = parent.bounds.origin.y
+ let parentHeight = parent.bounds.height
+ let y: CGFloat
+ switch verticalAlignment {
+ case .top:
+ y = parentY + layoutMargin.top
+ case .middle:
+ y = parentY + (parentHeight - height) / 2
+ case .bottom:
+ y = parentY + (parentHeight - height) - layoutMargin.bottom
+ }
+
+ return .init(x: x, y: y, width: width, height: height)
+ }
+
+ func layout(_ renderer: some ScreenRenderer) {
+ bounds = makeBounds(size)
+ renderer.layout(self)
+ shouldInvalidateLayout = false
+ }
+
+ func draw(_ renderer: some ScreenRenderer) {
+ renderer.draw(self)
+ }
+}
+
+extension ScreenObject: Hashable {
+ // MARK: Hashable
+ nonisolated public static func == (lhs: ScreenObject, rhs: ScreenObject) -> Bool {
+ lhs === rhs
+ }
+
+ nonisolated public func hash(into hasher: inout Hasher) {
+ hasher.combine(ObjectIdentifier(self))
+ }
+}
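+
+// A minimal subclass sketch (illustrative, not part of the vendored source):
+// custom objects override makeImage(_:) to supply their rendered content.
+//
+//     final class SolidColorScreenObject: ScreenObject {
+//         override func makeImage(_ renderer: some ScreenRenderer) -> CIImage? {
+//             CIImage(color: .red).cropped(to: .init(origin: .zero, size: bounds.size))
+//         }
+//     }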
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenObjectContainer.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenObjectContainer.swift
new file mode 100644
index 000000000..343c47be2
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenObjectContainer.swift
@@ -0,0 +1,80 @@
+import AVFoundation
+import Foundation
+
+@ScreenActor
+protocol ScreenObjectContainerConvertible: AnyObject {
+ func addChild(_ child: ScreenObject?) throws
+ func removeChild(_ child: ScreenObject?)
+}
+
+/// An object represents a collection of screen objects.
+public class ScreenObjectContainer: ScreenObject, ScreenObjectContainerConvertible {
+ /// The error domain codes.
+ public enum Error: Swift.Error {
+        /// An error the screen object registry throws when the app registers the same screen object instance twice.
+ case alreadyExists
+ }
+
+    /// The total number of children.
+ public var childCounts: Int {
+ children.count
+ }
+
+ private var children: [ScreenObject] = .init()
+
+ /// Adds the specified screen object as a child of the current screen object container.
+ public func addChild(_ child: ScreenObject?) throws {
+ guard let child, child != self else {
+ return
+ }
+ if child.parent != nil {
+ throw Error.alreadyExists
+ }
+ child.parent = self
+ children.append(child)
+ invalidateLayout()
+ }
+
+ /// Removes the specified screen object as a child of the current screen object container.
+ public func removeChild(_ child: ScreenObject?) {
+ guard let child, child.parent == self else {
+ return
+ }
+ guard let indexOf = children.firstIndex(where: { $0 == child }) else {
+ return
+ }
+ child.parent = nil
+ children.remove(at: indexOf)
+ invalidateLayout()
+ }
+
+ override func layout(_ renderer: some ScreenRenderer) {
+ bounds = makeBounds(size)
+ children.forEach { child in
+ if child.shouldInvalidateLayout || shouldInvalidateLayout {
+ child.layout(renderer)
+ }
+ }
+ shouldInvalidateLayout = false
+ }
+
+ override func draw(_ renderer: some ScreenRenderer) {
+ guard isVisible else {
+ return
+ }
+ children.forEach { child in
+ guard child.isVisible else {
+ return
+ }
+ child.draw(renderer)
+ }
+ }
+
+    func getScreenObjects<T: ScreenObject>() -> [T] {
+ var objects = children.compactMap { $0 as? T }
+ children.compactMap { $0 as? ScreenObjectContainer }.forEach {
+ objects += $0.getScreenObjects()
+ }
+ return objects
+ }
+}
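+
+// A minimal lookup sketch (illustrative, not part of the vendored source);
+// `container` is a hypothetical ScreenObjectContainer. getScreenObjects()
+// recursively collects all children of the requested type.
+//
+//     let videoTracks: [VideoTrackScreenObject] = container.getScreenObjects()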
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenRenderer.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenRenderer.swift
new file mode 100644
index 000000000..973e3300c
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenRenderer.swift
@@ -0,0 +1,28 @@
+import AVFoundation
+import CoreImage
+import Foundation
+
+/// A type that renders a screen object.
+@ScreenActor
+public protocol ScreenRenderer: AnyObject {
+ /// The CIContext instance.
+ var context: CIContext { get }
+ /// The CIImage options.
+ var imageOptions: [CIImageOption: Any]? { get }
+ /// Specifies the backgroundColor for output video.
+ var backgroundColor: CGColor { get set }
+ /// The current screen bounds.
+ var bounds: CGRect { get set }
+ /// The current presentationTimeStamp.
+ var presentationTimeStamp: CMTime { get set }
+ /// The current session synchronization clock.
+ var synchronizationClock: CMClock? { get set }
+    /// Lays out a screen object.
+ func layout(_ screenObject: ScreenObject)
+    /// Draws a screen object.
+ func draw(_ screenObject: ScreenObject)
+ /// Sets up the render target.
+ func setTarget(_ pixelBuffer: CVPixelBuffer?)
+    /// Renders a screen to the buffer.
+ func render()
+}
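+
+// A minimal rendering-pass sketch (illustrative, not part of the vendored source):
+// Screen.render(_:) drives a renderer through these phases each frame.
+//
+//     root.layout(renderer)   // recompute bounds where layout was invalidated
+//     root.draw(renderer)     // composite each visible screen object
+//     renderer.render()       // flush the canvas to the target pixel buffer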
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenRendererByCPU.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenRendererByCPU.swift
new file mode 100644
index 000000000..c52db0115
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenRendererByCPU.swift
@@ -0,0 +1,184 @@
+import Accelerate
+import AVFoundation
+import CoreImage
+
+final class ScreenRendererByCPU: ScreenRenderer {
+ static let noFlags = vImage_Flags(kvImageNoFlags)
+ static let doNotTile = vImage_Flags(kvImageDoNotTile)
+
+ var bounds: CGRect = .init(origin: .zero, size: Screen.size)
+ let imageOptions: [CIImageOption: Any]?
+ var synchronizationClock: CMClock?
+ var presentationTimeStamp: CMTime = .zero
+
+ let context: CIContext
+
+ var backgroundColor = CGColor(red: 0x00, green: 0x00, blue: 0x00, alpha: 0x00) {
+ didSet {
+ guard backgroundColor != oldValue, let components = backgroundColor.components else {
+ return
+ }
+ switch components.count {
+ case 2:
+ backgroundColorUInt8Array = [
+ UInt8(components[1] * 255),
+ UInt8(components[0] * 255),
+ UInt8(components[0] * 255),
+ UInt8(components[0] * 255)
+ ]
+ case 3:
+ backgroundColorUInt8Array = [
+ UInt8(components[2] * 255),
+ UInt8(components[0] * 255),
+ UInt8(components[1] * 255),
+ UInt8(components[1] * 255)
+ ]
+ case 4:
+ backgroundColorUInt8Array = [
+ UInt8(components[3] * 255),
+ UInt8(components[0] * 255),
+ UInt8(components[1] * 255),
+ UInt8(components[2] * 255)
+ ]
+ default:
+ break
+ }
+ }
+ }
+
+ private var format = vImage_CGImageFormat(
+ bitsPerComponent: 8,
+ bitsPerPixel: 32,
+ colorSpace: nil,
+ bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.first.rawValue),
+ version: 0,
+ decode: nil,
+ renderingIntent: .defaultIntent)
+
+ private var images: [ScreenObject: vImage_Buffer] = [:]
+ private var canvas: vImage_Buffer = .init()
+ private var converter: vImageConverter?
+ private var shapeFactory = ShapeFactory()
+ private var pixelFormatType: OSType? {
+ didSet {
+ guard pixelFormatType != oldValue else {
+ return
+ }
+ converter = nil
+ }
+ }
+ private var backgroundColorUInt8Array: [UInt8] = [0x00, 0x00, 0x00, 0x00]
+ private lazy var choromaKeyProcessor: ChromaKeyProcessor? = {
+ return try? ChromaKeyProcessor()
+ }()
+
+ init(dynamicRangeMode: DynamicRangeMode) {
+ context = dynamicRangeMode.makeCIContext()
+ if let colorSpace = dynamicRangeMode.colorSpace {
+ imageOptions = [.colorSpace: colorSpace]
+ } else {
+ imageOptions = nil
+ }
+ }
+
+ func setTarget(_ pixelBuffer: CVPixelBuffer?) {
+ guard let pixelBuffer else {
+ return
+ }
+ pixelFormatType = pixelBuffer.pixelFormatType
+ if converter == nil {
+ let cvImageFormat = vImageCVImageFormat_CreateWithCVPixelBuffer(pixelBuffer).takeRetainedValue()
+ vImageCVImageFormat_SetColorSpace(cvImageFormat, CGColorSpaceCreateDeviceRGB())
+ converter = try? vImageConverter.make(
+ sourceFormat: cvImageFormat,
+ destinationFormat: format
+ )
+ }
+ guard let converter else {
+ return
+ }
+ vImageBuffer_InitForCopyFromCVPixelBuffer(
+ &canvas,
+ converter,
+ pixelBuffer,
+ vImage_Flags(kvImageNoAllocate)
+ )
+ switch pixelFormatType {
+ case kCVPixelFormatType_32ARGB:
+ vImageBufferFill_ARGB8888(
+ &canvas,
+ &backgroundColorUInt8Array,
+ vImage_Flags(kvImageNoFlags)
+ )
+ default:
+ break
+ }
+ }
+
+ func layout(_ screenObject: ScreenObject) {
+ autoreleasepool {
+ guard let image: CGImage = screenObject.makeImage(self) else {
+ return
+ }
+ do {
+ images[screenObject]?.free()
+ var buffer = try vImage_Buffer(cgImage: image, format: format)
+ images[screenObject] = buffer
+ if 0 < screenObject.cornerRadius {
+ if var mask = shapeFactory.cornerRadius(image.size, cornerRadius: screenObject.cornerRadius) {
+ vImageOverwriteChannels_ARGB8888(&mask, &buffer, &buffer, 0x8, Self.noFlags)
+ }
+ } else {
+ if let screenObject = screenObject as? (any ChromaKeyProcessable),
+ let chromaKeyColor = screenObject.chromaKeyColor,
+ var mask = try choromaKeyProcessor?.makeMask(&buffer, chromeKeyColor: chromaKeyColor) {
+ vImageOverwriteChannels_ARGB8888(&mask, &buffer, &buffer, 0x8, Self.noFlags)
+ }
+ }
+ } catch {
+ logger.error(error)
+ }
+ }
+ }
+
+ func draw(_ screenObject: ScreenObject) {
+ guard var image = images[screenObject] else {
+ return
+ }
+
+ let origin = screenObject.bounds.origin
+ let start = Int(max(0, origin.y)) * canvas.rowBytes + Int(max(0, origin.x)) * 4
+
+ var destination = vImage_Buffer(
+ data: canvas.data.advanced(by: start),
+ height: image.height,
+ width: image.width,
+ rowBytes: canvas.rowBytes
+ )
+
+ switch pixelFormatType {
+ case kCVPixelFormatType_32ARGB:
+ switch screenObject.blendMode {
+ case .normal:
+ vImageCopyBuffer(
+ &image,
+ &destination,
+ 4,
+ Self.doNotTile
+ )
+ case .alpha:
+ vImageAlphaBlend_ARGB8888(
+ &image,
+ &destination,
+ &destination,
+ Self.doNotTile
+ )
+ }
+ default:
+ break
+ }
+ }
+
+ func render() {
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenRendererByGPU.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenRendererByGPU.swift
new file mode 100644
index 000000000..a214ddcef
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenRendererByGPU.swift
@@ -0,0 +1,87 @@
+import Accelerate
+import AVFoundation
+import CoreImage
+import CoreImage.CIFilterBuiltins
+
+final class ScreenRendererByGPU: ScreenRenderer {
+ var bounds: CGRect = .init(origin: .zero, size: Screen.size)
+ let imageOptions: [CIImageOption: Any]?
+ var synchronizationClock: CMClock?
+ var presentationTimeStamp: CMTime = .zero
+
+ let context: CIContext
+
+ var backgroundColor = CGColor(red: 0x00, green: 0x00, blue: 0x00, alpha: 0x00) {
+ didSet {
+ guard backgroundColor != oldValue else {
+ return
+ }
+ backgroundCIColor = CIColor(cgColor: backgroundColor)
+ }
+ }
+
+ private var canvas: CIImage = .init()
+ private var images: [ScreenObject: CIImage] = [:]
+ private var pixelBuffer: CVPixelBuffer?
+ private let dynamicRangeMode: DynamicRangeMode
+ private var backgroundCIColor = CIColor()
+ private var roundedRectangleFactory = RoundedRectangleFactory()
+
+ init(dynamicRangeMode: DynamicRangeMode) {
+ self.dynamicRangeMode = dynamicRangeMode
+ context = dynamicRangeMode.makeCIContext()
+ if let colorSpace = dynamicRangeMode.colorSpace {
+ imageOptions = [.colorSpace: colorSpace]
+ } else {
+ imageOptions = nil
+ }
+ }
+
+ func setTarget(_ pixelBuffer: CVPixelBuffer?) {
+ guard let pixelBuffer else {
+ return
+ }
+ self.pixelBuffer = pixelBuffer
+ canvas = CIImage(color: backgroundCIColor).cropped(to: bounds)
+ }
+
+ func layout(_ screenObject: ScreenObject) {
+ guard let image: CIImage = screenObject.makeImage(self) else {
+ return
+ }
+ if 0 < screenObject.cornerRadius {
+ if let mask = roundedRectangleFactory.cornerRadius(screenObject.bounds.size, cornerRadius: screenObject.cornerRadius) {
+ images[screenObject] = image.applyingFilter("CIBlendWithAlphaMask", parameters: [
+ "inputMaskImage": mask
+ ])
+ } else {
+ images[screenObject] = image
+ }
+ } else {
+ images[screenObject] = image
+ }
+ }
+
+ func draw(_ screenObject: ScreenObject) {
+ guard let image = images[screenObject] else {
+ return
+ }
+ let origin = screenObject.bounds.origin
+ if origin.x == 0 && origin.y == 0 {
+ canvas = image
+ .composited(over: canvas)
+ } else {
+ canvas = image
+ .transformed(by: .init(translationX: origin.x, y: bounds.height - origin.y - screenObject.bounds.height))
+ .composited(over: canvas)
+ }
+ }
+
+ func render() {
+ guard let pixelBuffer else {
+ return
+ }
+ context.render(canvas, to: pixelBuffer, bounds: canvas.extent, colorSpace: dynamicRangeMode.colorSpace)
+ dynamicRangeMode.attach(pixelBuffer)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/Shape.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/Shape.swift
new file mode 100644
index 000000000..b255f022a
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/Shape.swift
@@ -0,0 +1,40 @@
+import Accelerate
+import Foundation
+
+#if canImport(AppKit)
+import AppKit
+#endif
+
+#if canImport(UIKit)
+import UIKit
+#endif
+
+final class RoundedSquareShape: Shape {
+ var rect: CGRect = .zero
+ var cornerRadius: CGFloat = .zero
+
+ func makeCGImage() -> CGImage? {
+ guard let context = CGContext(
+ data: nil,
+ width: Int(rect.width),
+ height: Int(rect.height),
+ bitsPerComponent: 8,
+ bytesPerRow: Int(rect.width),
+ space: CGColorSpaceCreateDeviceGray(),
+ bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.none.rawValue).rawValue
+ ) else {
+ return nil
+ }
+ let path = CGPath(roundedRect: rect, cornerWidth: cornerRadius, cornerHeight: cornerRadius, transform: nil)
+ #if canImport(AppKit) && !targetEnvironment(macCatalyst)
+ context.setFillColor(NSColor.white.cgColor)
+ #endif
+ #if canImport(UIKit)
+ context.setFillColor(UIColor.white.cgColor)
+ #endif
+ context.addPath(path)
+ context.closePath()
+ context.fillPath()
+ return context.makeImage()
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ShapeFactory.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ShapeFactory.swift
new file mode 100644
index 000000000..5b5b70aaa
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ShapeFactory.swift
@@ -0,0 +1,34 @@
+import Accelerate
+import CoreMedia
+import Foundation
+
+protocol Shape {
+ func makeCGImage() -> CGImage?
+}
+
+final class ShapeFactory {
+ private var imageBuffers: [String: vImage_Buffer] = [:]
+ private var roundedSquareShape = RoundedSquareShape()
+
+ func cornerRadius(_ size: CGSize, cornerRadius: CGFloat) -> vImage_Buffer? {
+ let key = "\(size.width):\(size.height):\(cornerRadius)"
+ if let buffer = imageBuffers[key] {
+ return buffer
+ }
+ roundedSquareShape.rect = .init(origin: .zero, size: size)
+ roundedSquareShape.cornerRadius = cornerRadius
+ guard
+ let image = roundedSquareShape.makeCGImage() else {
+ return nil
+ }
+ imageBuffers[key] = try? vImage_Buffer(cgImage: image)
+ return imageBuffers[key]
+ }
+
+ func removeAll() {
+ for buffer in imageBuffers.values {
+ buffer.free()
+ }
+ imageBuffers.removeAll()
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/StreamScreenObject.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/StreamScreenObject.swift
new file mode 100644
index 000000000..47086ea88
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/StreamScreenObject.swift
@@ -0,0 +1,102 @@
+import AVFoundation
+import CoreGraphics
+import CoreImage
+import Foundation
+
+/// An object that manages offscreen rendering of a streaming video track source.
+///
+/// ## Usage
+/// ```swift
+/// var streamScreenObject = StreamScreenObject()
+///
+/// Task {
+/// // Register to the Stream's Output observer.
+/// stream.addOutput(streamScreenObject)
+/// stream.play("yourStreamName")
+/// }
+///
+/// Task { @ScreenActor in
+/// streamScreenObject.layoutMargin = .init(top: 16, left: 0, bottom: 0, right: 16)
+/// streamScreenObject.size = .init(width: 160 * 2, height: 90 * 2)
+///
+/// try? await mixer.screen.addChild(streamScreenObject)
+/// }
+/// ```
+public final class StreamScreenObject: ScreenObject, ChromaKeyProcessable {
+ public var chromaKeyColor: CGColor?
+
+    /// A value that specifies how the video is displayed within a player layer’s bounds.
+ public var videoGravity: AVLayerVideoGravity = .resizeAspect {
+ didSet {
+ guard videoGravity != oldValue else {
+ return
+ }
+ invalidateLayout()
+ }
+ }
+
+ private var sampleBuffer: CMSampleBuffer? {
+ didSet {
+ guard sampleBuffer != oldValue else {
+ return
+ }
+ if sampleBuffer == nil {
+ return
+ }
+ invalidateLayout()
+ }
+ }
+
+ override var blendMode: ScreenObject.BlendMode {
+ if 0.0 < cornerRadius || chromaKeyColor != nil {
+ return .alpha
+ }
+ return .normal
+ }
+
+ override public func makeBounds(_ size: CGSize) -> CGRect {
+ guard parent != nil, let image = sampleBuffer?.formatDescription?.dimensions.size else {
+ return super.makeBounds(size)
+ }
+ let bounds = super.makeBounds(size)
+ switch videoGravity {
+ case .resizeAspect:
+ let scale = min(bounds.size.width / image.width, bounds.size.height / image.height)
+ let scaleSize = CGSize(width: image.width * scale, height: image.height * scale)
+ return super.makeBounds(scaleSize)
+ case .resizeAspectFill:
+ return bounds
+ default:
+ return bounds
+ }
+ }
+
+ override public func makeImage(_ renderer: some ScreenRenderer) -> CGImage? {
+ guard let image: CIImage = makeImage(renderer) else {
+ return nil
+ }
+ return renderer.context.createCGImage(image, from: videoGravity.region(bounds, image: image.extent))
+ }
+
+ override public func makeImage(_ renderer: some ScreenRenderer) -> CIImage? {
+ guard let sampleBuffer, let pixelBuffer = sampleBuffer.imageBuffer else {
+ return nil
+ }
+ return CIImage(cvPixelBuffer: pixelBuffer).transformed(by: videoGravity.scale(
+ bounds.size,
+ image: pixelBuffer.size
+ ))
+ }
+}
+
+extension StreamScreenObject: StreamOutput {
+    // MARK: StreamOutput
+ nonisolated public func stream(_ stream: some StreamConvertible, didOutput audio: AVAudioBuffer, when: AVAudioTime) {
+ }
+
+ nonisolated public func stream(_ stream: some StreamConvertible, didOutput video: CMSampleBuffer) {
+ Task { @ScreenActor in
+ self.sampleBuffer = video
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/TextScreenObject.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/TextScreenObject.swift
new file mode 100644
index 000000000..5fcc60a84
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/TextScreenObject.swift
@@ -0,0 +1,99 @@
+#if canImport(AppKit)
+import AppKit
+#endif
+
+#if canImport(UIKit)
+import UIKit
+#endif
+
+/// An object that manages offscreen rendering of a text source.
+public final class TextScreenObject: ScreenObject {
+ /// Specifies the text value.
+ public var string: String = "" {
+ didSet {
+ guard string != oldValue else {
+ return
+ }
+ invalidateLayout()
+ }
+ }
+
+ #if os(macOS)
+ /// Specifies the attributes for strings.
+ public var attributes: [NSAttributedString.Key: Any]? = [
+ .font: NSFont.boldSystemFont(ofSize: 32),
+ .foregroundColor: NSColor.white
+ ] {
+ didSet {
+ invalidateLayout()
+ }
+ }
+ #else
+ /// Specifies the attributes for strings.
+ public var attributes: [NSAttributedString.Key: Any]? = [
+ .font: UIFont.boldSystemFont(ofSize: 32),
+ .foregroundColor: UIColor.white
+ ] {
+ didSet {
+ invalidateLayout()
+ }
+ }
+ #endif
+
+ override public var bounds: CGRect {
+ didSet {
+ guard bounds != oldValue else {
+ return
+ }
+ context = CGContext(
+ data: nil,
+ width: Int(bounds.width),
+ height: Int(bounds.height),
+ bitsPerComponent: 8,
+ bytesPerRow: Int(bounds.width) * 4,
+ space: CGColorSpaceCreateDeviceRGB(),
+ bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.premultipliedFirst.rawValue).rawValue
+ )
+ }
+ }
+
+ private var context: CGContext?
+ private var framesetter: CTFramesetter?
+
+ override public func makeBounds(_ size: CGSize) -> CGRect {
+ guard !string.isEmpty else {
+ self.framesetter = nil
+ return .zero
+ }
+ let bounds = super.makeBounds(size)
+ let attributedString = NSAttributedString(string: string, attributes: attributes)
+ let framesetter = CTFramesetterCreateWithAttributedString(attributedString)
+ let frameSize = CTFramesetterSuggestFrameSizeWithConstraints(
+ framesetter,
+ .init(),
+ nil,
+ bounds.size,
+ nil
+ )
+ self.framesetter = framesetter
+ return super.makeBounds(frameSize)
+ }
+
+ override public func makeImage(_ renderer: some ScreenRenderer) -> CGImage? {
+ guard let context, let framesetter else {
+ return nil
+ }
+ let path = CGPath(rect: .init(origin: .zero, size: bounds.size), transform: nil)
+ let frame = CTFramesetterCreateFrame(framesetter, .init(), path, nil)
+ context.clear(context.boundingBoxOfPath)
+ CTFrameDraw(frame, context)
+ return context.makeImage()
+ }
+
+ override public func makeImage(_ renderer: some ScreenRenderer) -> CIImage? {
+ guard let image: CGImage = makeImage(renderer) else {
+ return nil
+ }
+ return CIImage(cgImage: image)
+ }
+}
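+
+// A minimal usage sketch (illustrative, not part of the vendored source);
+// `screen` is a hypothetical Screen instance supplied by the caller.
+//
+//     Task { @ScreenActor in
+//         let text = TextScreenObject()
+//         text.string = "Hello, HaishinKit."
+//         text.horizontalAlignment = .center
+//         try? screen.addChild(text)
+//     }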
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/VideoEffect.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/VideoEffect.swift
new file mode 100644
index 000000000..95e189659
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/VideoEffect.swift
@@ -0,0 +1,27 @@
+import AVFoundation
+import CoreImage
+import Foundation
+
+/// An object that applies a video effect.
+/// - seealso:[Processing an Image Using Built-in Filters](https://developer.apple.com/documentation/coreimage/processing_an_image_using_built-in_filters)
+///
+/// ## Example code:
+/// ```swift
+/// final class MonochromeEffect: VideoEffect {
+/// let filter: CIFilter? = CIFilter(name: "CIColorMonochrome")
+///
+/// func execute(_ image: CIImage) -> CIImage {
+/// guard let filter: CIFilter = filter else {
+/// return image
+/// }
+/// filter.setValue(image, forKey: "inputImage")
+/// filter.setValue(CIColor(red: 0.75, green: 0.75, blue: 0.75), forKey: "inputColor")
+/// filter.setValue(1.0, forKey: "inputIntensity")
+/// return filter.outputImage ?? image
+/// }
+/// }
+/// ```
+public protocol VideoEffect: AnyObject {
+ /// Executes to apply a video effect.
+ func execute(_ image: CIImage) -> CIImage
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/VideoTrackScreenObject.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/VideoTrackScreenObject.swift
new file mode 100644
index 000000000..4f95f056c
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/VideoTrackScreenObject.swift
@@ -0,0 +1,139 @@
+import AVFoundation
+import CoreImage
+
+/// An object that manages offscreen rendering of a video track source.
+public final class VideoTrackScreenObject: ScreenObject, ChromaKeyProcessable {
+ static let capacity: Int = 3
+ public var chromaKeyColor: CGColor?
+
+    /// Specifies the track number that selects the visual content to display.
+ public var track: UInt8 = 0 {
+ didSet {
+ guard track != oldValue else {
+ return
+ }
+ invalidateLayout()
+ }
+ }
+
+ /// A value that specifies how the video is displayed within a player layer’s bounds.
+ public var videoGravity: AVLayerVideoGravity = .resizeAspect {
+ didSet {
+ guard videoGravity != oldValue else {
+ return
+ }
+ invalidateLayout()
+ }
+ }
+
+ /// The frame rate.
+ public var frameRate: Int {
+ frameTracker.frameRate
+ }
+
+ override var blendMode: ScreenObject.BlendMode {
+ if 0.0 < cornerRadius || chromaKeyColor != nil {
+ return .alpha
+ }
+ return .normal
+ }
+
+    private var queue: TypedBlockQueue<CMSampleBuffer>?
+ private var effects: [any VideoEffect] = .init()
+ private var frameTracker = FrameTracker()
+
+    /// Creates a screen object.
+ override public init() {
+ super.init()
+ do {
+ queue = try TypedBlockQueue(capacity: Self.capacity, handlers: .outputPTSSortedSampleBuffers)
+ } catch {
+ logger.error(error)
+ }
+ Task {
+ horizontalAlignment = .center
+ }
+ }
+
+ /// Registers a video effect.
+ public func registerVideoEffect(_ effect: some VideoEffect) -> Bool {
+ if effects.contains(where: { $0 === effect }) {
+ return false
+ }
+ effects.append(effect)
+ return true
+ }
+
+ /// Unregisters a video effect.
+ public func unregisterVideoEffect(_ effect: some VideoEffect) -> Bool {
+ if let index = effects.firstIndex(where: { $0 === effect }) {
+ effects.remove(at: index)
+ return true
+ }
+ return false
+ }
+
+ override public func makeImage(_ renderer: some ScreenRenderer) -> CGImage? {
+ guard let image: CIImage = makeImage(renderer) else {
+ return nil
+ }
+ return renderer.context.createCGImage(image, from: videoGravity.region(bounds, image: image.extent))
+ }
+
+ override public func makeImage(_ renderer: some ScreenRenderer) -> CIImage? {
+ let presentationTimeStamp = renderer.presentationTimeStamp.convertTime(from: CMClockGetHostTimeClock(), to: renderer.synchronizationClock)
+ guard let sampleBuffer = queue?.dequeue(presentationTimeStamp),
+ let pixelBuffer = sampleBuffer.imageBuffer else {
+ return nil
+ }
+ frameTracker.update(sampleBuffer.presentationTimeStamp)
+ // Resizing before applying the filter for performance optimization.
+ var image = CIImage(cvPixelBuffer: pixelBuffer, options: renderer.imageOptions).transformed(by: videoGravity.scale(
+ bounds.size,
+ image: pixelBuffer.size
+ ))
+ if effects.isEmpty {
+ return image
+ } else {
+ for effect in effects {
+ image = effect.execute(image)
+ }
+ return image
+ }
+ }
+
+ override public func makeBounds(_ size: CGSize) -> CGRect {
+ guard parent != nil, let image = queue?.head?.formatDescription?.dimensions.size else {
+ return super.makeBounds(size)
+ }
+ let bounds = super.makeBounds(size)
+ switch videoGravity {
+ case .resizeAspect:
+ let scale = min(bounds.size.width / image.width, bounds.size.height / image.height)
+ let scaleSize = CGSize(width: image.width * scale, height: image.height * scale)
+ return super.makeBounds(scaleSize)
+ case .resizeAspectFill:
+ return bounds
+ default:
+ return bounds
+ }
+ }
+
+ override public func draw(_ renderer: some ScreenRenderer) {
+ super.draw(renderer)
+ if queue?.isEmpty == false {
+ invalidateLayout()
+ }
+ }
+
+ func enqueue(_ sampleBuffer: CMSampleBuffer) {
+ try? queue?.enqueue(sampleBuffer)
+ invalidateLayout()
+ }
+
+ func reset() {
+ frameTracker.clear()
+ try? queue?.reset()
+ invalidateLayout()
+ }
+}
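+
+// A minimal picture-in-picture sketch (illustrative, not part of the vendored
+// source); `screen` is a hypothetical Screen instance supplied by the caller.
+//
+//     Task { @ScreenActor in
+//         let pip = VideoTrackScreenObject()
+//         pip.track = 1
+//         pip.size = .init(width: 320, height: 180)
+//         pip.cornerRadius = 16
+//         try? screen.addChild(pip)
+//     }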
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/Session.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/Session.swift
new file mode 100644
index 000000000..4e63ef4c0
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/Session.swift
@@ -0,0 +1,45 @@
+import Foundation
+
+package let kSession_maxRetryCount: Int = 3
+
+/// Represents the type of session to establish.
+public enum SessionMode: Sendable {
+ /// A publishing session, used to stream media from the local device to a server or peers.
+ case publish
+ /// A playback session, used to receive and play media streamed from a server or peers.
+ case playback
+}
+
+/// Represents the current connection state of a session.
+public enum SessionReadyState: Int, Sendable {
+ /// The session is currently attempting to establish a connection.
+ case connecting
+ /// The session has been successfully established and is ready for communication.
+ case open
+ /// The session is in the process of closing the connection.
+ case closing
+ /// The session has been closed or could not be established.
+ case closed
+}
+
+/// A type that represents the foundation of a streaming session.
+///
+/// It is designed so that various streaming services can be used through a common API.
+/// While coding against the conventional Connection offered flexibility,
+/// it also required a certain level of maturity to handle network communication properly.
+public protocol Session: NetworkConnection {
+ /// The current ready state.
+    var readyState: AsyncStream<SessionReadyState> { get }
+
+ /// The stream instance.
+ var stream: any StreamConvertible { get async }
+
+    /// Creates a new session with a URI.
+ init(uri: URL, mode: SessionMode, configuration: (any SessionConfiguration)?)
+
+ /// Sets a max retry count.
+ func setMaxRetryCount(_ maxRetryCount: Int)
+
+ /// Creates a connection to the server.
+ func connect(_ disconnected: @Sendable @escaping () -> Void) async throws
+}
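+
+// A minimal consumption sketch (illustrative, not part of the vendored source);
+// `session` is a hypothetical value obtained from a SessionBuilder.
+//
+//     Task {
+//         for await state in session.readyState {
+//             print("session state:", state)
+//         }
+//     }
+//     try await session.connect {
+//         print("disconnected")
+//     }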
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/SessionBuilder.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/SessionBuilder.swift
new file mode 100644
index 000000000..92dcd9e19
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/SessionBuilder.swift
@@ -0,0 +1,31 @@
+import Foundation
+
+/// An actor that provides a builder for a Session object.
+public actor SessionBuilder {
+ private let factory: SessionBuilderFactory
+ private let uri: URL
+ private var mode: SessionMode = .publish
+ private var configuration: (any SessionConfiguration)?
+
+ init(factory: SessionBuilderFactory, uri: URL) {
+ self.factory = factory
+ self.uri = uri
+ }
+
+    /// Sets a session mode.
+ public func setMode(_ mode: SessionMode) -> Self {
+ self.mode = mode
+ return self
+ }
+
+    /// Sets a configuration.
+ public func setConfiguration(_ configuration: (any SessionConfiguration)?) -> Self {
+ self.configuration = configuration
+ return self
+ }
+
+ /// Creates a Session instance with the specified fields.
+ public func build() async throws -> (any Session)? {
+ return try await factory.build(uri, method: mode, configuration: configuration)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/SessionBuilderFactory.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/SessionBuilderFactory.swift
new file mode 100644
index 000000000..0ebf03fe2
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/SessionBuilderFactory.swift
@@ -0,0 +1,56 @@
+import Foundation
+
+/// An actor that provides a factory to create a SessionBuilder.
+///
+/// ## Prerequisites
+/// You need to register the factory in advance as follows.
+/// ```swift
+/// import RTMPHaishinKit
+/// import SRTHaishinKit
+///
+/// await SessionBuilderFactory.shared.register(RTMPSessionFactory())
+/// await SessionBuilderFactory.shared.register(SRTSessionFactory())
+/// ```
+public actor SessionBuilderFactory {
+ /// The shared instance.
+ public static let shared = SessionBuilderFactory()
+
+ /// The error domain codes.
+ public enum Error: Swift.Error {
+ /// An illegal argument.
+ case illegalArgument
+        /// The factory can't find a session factory that supports the URI.
+ case notFound
+ }
+
+ private var factories: [any SessionFactory] = []
+
+ private init() {
+ }
+
+ /// Makes a new session builder.
+ public func make(_ uri: URL?) throws -> SessionBuilder {
+ guard let uri else {
+ throw Error.illegalArgument
+ }
+ return SessionBuilder(factory: self, uri: uri)
+ }
+
+ /// Registers a factory.
+ public func register(_ factory: some SessionFactory) {
+ guard !factories.contains(where: { $0.supportedProtocols == factory.supportedProtocols }) else {
+ return
+ }
+ factories.append(factory)
+ }
+
+ func build(_ uri: URL?, method: SessionMode, configuration: (any SessionConfiguration)?) throws -> (any Session) {
+ guard let uri else {
+ throw Error.illegalArgument
+ }
+ for factory in factories where factory.supportedProtocols.contains(uri.scheme ?? "") {
+ return factory.make(uri, mode: method, configuration: configuration)
+ }
+ throw Error.notFound
+ }
+}
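+
+// A minimal end-to-end sketch (illustrative, not part of the vendored source):
+// building a playback session once a matching factory has been registered.
+//
+//     let session = try await SessionBuilderFactory.shared
+//         .make(URL(string: "rtmp://example.com/live"))
+//         .setMode(.playback)
+//         .build()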
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/SessionConfiguration.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/SessionConfiguration.swift
new file mode 100644
index 000000000..91e902756
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/SessionConfiguration.swift
@@ -0,0 +1,4 @@
+import Foundation
+
+public protocol SessionConfiguration: Encodable, Sendable {
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/SessionFactory.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/SessionFactory.swift
new file mode 100644
index 000000000..a96277c2b
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/SessionFactory.swift
@@ -0,0 +1,10 @@
+import Foundation
+
+/// A type that represents a streaming session factory.
+public protocol SessionFactory {
+ /// The supported protocols.
+    var supportedProtocols: Set<String> { get }
+
+    /// Makes a new session for the given URI.
+ func make(_ uri: URL, mode: SessionMode, configuration: (any SessionConfiguration)?) -> any Session
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/AudioPlayer.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/AudioPlayer.swift
new file mode 100644
index 000000000..4f75cbc19
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/AudioPlayer.swift
@@ -0,0 +1,51 @@
+@preconcurrency import AVFoundation
+
+/// An object that provides the interface to control audio playback.
+public final actor AudioPlayer {
+ private var connected: [AudioPlayerNode: Bool] = [:]
+ private var audioEngine: AVAudioEngine?
+ private var playerNodes: [AudioPlayerNode: AVAudioPlayerNode] = [:]
+
+    /// Creates an audio player object.
+ public init(audioEngine: AVAudioEngine) {
+ self.audioEngine = audioEngine
+ }
+
+ func isConnected(_ playerNode: AudioPlayerNode) -> Bool {
+ return connected[playerNode] == true
+ }
+
+ func connect(_ playerNode: AudioPlayerNode, format: AVAudioFormat?) {
+ guard let audioEngine, let avPlayerNode = playerNodes[playerNode] else {
+ return
+ }
+ if let format {
+ audioEngine.connect(avPlayerNode, to: audioEngine.outputNode, format: format)
+ if !audioEngine.isRunning {
+ try? audioEngine.start()
+ }
+ connected[playerNode] = true
+ } else {
+ if audioEngine.isRunning {
+ audioEngine.stop()
+ }
+ audioEngine.disconnectNodeOutput(avPlayerNode)
+ connected[playerNode] = nil
+ }
+ }
+
+ func detach(_ playerNode: AudioPlayerNode) {
+ if let playerNode = playerNodes[playerNode] {
+ audioEngine?.detach(playerNode)
+ }
+ playerNodes[playerNode] = nil
+ }
+
+ func makePlayerNode() -> AudioPlayerNode {
+ let avAudioPlayerNode = AVAudioPlayerNode()
+ audioEngine?.attach(avAudioPlayerNode)
+ let playerNode = AudioPlayerNode(player: self, playerNode: avAudioPlayerNode)
+ playerNodes[playerNode] = avAudioPlayerNode
+ return playerNode
+ }
+}
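+
+// A minimal wiring sketch (illustrative, not part of the vendored source);
+// `incomingStream` is a hypothetical IncomingStream instance.
+//
+//     let audioPlayer = AudioPlayer(audioEngine: AVAudioEngine())
+//     Task {
+//         await incomingStream.attachAudioPlayer(audioPlayer)
+//     }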
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/AudioPlayerNode.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/AudioPlayerNode.swift
new file mode 100644
index 000000000..1b6cc92a4
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/AudioPlayerNode.swift
@@ -0,0 +1,107 @@
+@preconcurrency import AVFoundation
+import Foundation
+
+final actor AudioPlayerNode {
+ static let bufferCounts: Int = 10
+
+ var currentTime: TimeInterval {
+ if playerNode.isPlaying {
+ guard
+ let nodeTime = playerNode.lastRenderTime,
+ let playerTime = playerNode.playerTime(forNodeTime: nodeTime) else {
+ return 0.0
+ }
+ return TimeInterval(playerTime.sampleTime) / playerTime.sampleRate
+ }
+ return 0.0
+ }
+ private(set) var isPaused = false
+ private(set) var isRunning = false
+ private(set) var soundTransfrom = SoundTransform()
+ private let playerNode: AVAudioPlayerNode
+ private var audioTime = AudioTime()
+ private var scheduledAudioBuffers: Int = 0
+ private var isBuffering = true
+ private weak var player: AudioPlayer?
+ private var format: AVAudioFormat? {
+ didSet {
+ guard format != oldValue else {
+ return
+ }
+ Task { [format] in
+ await player?.connect(self, format: format)
+ }
+ }
+ }
+
+ init(player: AudioPlayer, playerNode: AVAudioPlayerNode) {
+ self.player = player
+ self.playerNode = playerNode
+ }
+
+ func setSoundTransfrom(_ soundTransfrom: SoundTransform) {
+ soundTransfrom.apply(playerNode)
+ }
+
+ func enqueue(_ audioBuffer: AVAudioBuffer, when: AVAudioTime) async {
+ format = audioBuffer.format
+ guard let audioBuffer = audioBuffer as? AVAudioPCMBuffer, await player?.isConnected(self) == true else {
+ return
+ }
+ if !audioTime.hasAnchor {
+ audioTime.anchor(playerNode.lastRenderTime ?? AVAudioTime(hostTime: 0))
+ }
+ scheduledAudioBuffers += 1
+ if !isPaused && !playerNode.isPlaying && Self.bufferCounts <= scheduledAudioBuffers {
+ playerNode.play()
+ }
+ Task {
+ audioTime.advanced(Int64(audioBuffer.frameLength))
+ await playerNode.scheduleBuffer(audioBuffer, at: audioTime.at)
+ scheduledAudioBuffers -= 1
+ if scheduledAudioBuffers == 0 {
+ isBuffering = true
+ }
+ }
+ }
+
+ func detach() async {
+ stopRunning()
+ await player?.detach(self)
+ }
+}
+
+extension AudioPlayerNode: AsyncRunner {
+ // MARK: AsyncRunner
+ func startRunning() {
+ guard !isRunning else {
+ return
+ }
+ scheduledAudioBuffers = 0
+ isRunning = true
+ }
+
+ func stopRunning() {
+ guard isRunning else {
+ return
+ }
+ if playerNode.isPlaying {
+ playerNode.stop()
+ playerNode.reset()
+ }
+ audioTime.reset()
+ format = nil
+ isRunning = false
+ }
+}
+
+extension AudioPlayerNode: Hashable {
+ // MARK: Hashable
+ nonisolated public static func == (lhs: AudioPlayerNode, rhs: AudioPlayerNode) -> Bool {
+ lhs === rhs
+ }
+
+ nonisolated public func hash(into hasher: inout Hasher) {
+ hasher.combine(ObjectIdentifier(self))
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/IncomingStream.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/IncomingStream.swift
new file mode 100644
index 000000000..c7e77114d
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/IncomingStream.swift
@@ -0,0 +1,91 @@
+@preconcurrency import AVFoundation
+import Foundation
+
+/// An actor that provides a stream playback feature.
+package final actor IncomingStream {
+ public private(set) var isRunning = false
+ /// The sound transform value control.
+ public var soundTransfrom: SoundTransform? {
+ get async {
+ return await audioPlayerNode?.soundTransfrom
+ }
+ }
+ private lazy var mediaLink = MediaLink()
+ private lazy var audioCodec = AudioCodec()
+ private lazy var videoCodec = VideoCodec()
+ private weak var stream: (any StreamConvertible)?
+ private var audioPlayerNode: AudioPlayerNode?
+
+ /// Creates a new instance.
+ public init(_ stream: some StreamConvertible) {
+ self.stream = stream
+ }
+
+ /// Sets the sound transform value control.
+ public func setSoundTransform(_ soundTransfrom: SoundTransform) async {
+ await audioPlayerNode?.setSoundTransfrom(soundTransfrom)
+ }
+
+ /// Appends a sample buffer for playback.
+ public func append(_ buffer: CMSampleBuffer) {
+ switch buffer.formatDescription?.mediaType {
+ case .audio:
+ audioCodec.append(buffer)
+ case .video:
+ videoCodec.append(buffer)
+ default:
+ break
+ }
+ }
+
+ /// Appends an audio buffer for playback.
+ public func append(_ buffer: AVAudioBuffer, when: AVAudioTime) {
+ audioCodec.append(buffer, when: when)
+ }
+
+ /// Attaches an audio player.
+ public func attachAudioPlayer(_ audioPlayer: AudioPlayer?) async {
+ await audioPlayerNode?.detach()
+ audioPlayerNode = await audioPlayer?.makePlayerNode()
+ await mediaLink.setAudioPlayer(audioPlayerNode)
+ }
+}
+
+extension IncomingStream: AsyncRunner {
+ // MARK: AsyncRunner
+ public func startRunning() {
+ guard !isRunning else {
+ return
+ }
+ audioCodec.settings.format = .pcm
+ videoCodec.startRunning()
+ audioCodec.startRunning()
+ isRunning = true
+ // Deliver decoded video frames directly to the stream without MediaLink pacing.
+ // MediaLink uses a display-link choreographer that adds an extra buffering/pacing
+ // layer. For recvonly WebRTC streams feeding into an external buffered pipeline
+ // (like Moblin's BufferedVideo), this double-pacing causes frame drops and jitter.
+ Task {
+ for await video in videoCodec.outputStream {
+ await stream?.append(video)
+ }
+ }
+ Task {
+ await audioPlayerNode?.startRunning()
+ for await audio in audioCodec.outputStream {
+ await audioPlayerNode?.enqueue(audio.0, when: audio.1)
+ await stream?.append(audio.0, when: audio.1)
+ }
+ }
+ }
+
+ public func stopRunning() {
+ guard isRunning else {
+ return
+ }
+ videoCodec.stopRunning()
+ audioCodec.stopRunning()
+ Task { await audioPlayerNode?.stopRunning() }
+ isRunning = false
+ }
+}
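+
+// A minimal playback sketch (illustrative, not part of the vendored source);
+// `stream` and `sampleBuffer` are hypothetical values supplied by the caller.
+//
+//     let incoming = IncomingStream(stream)
+//     await incoming.startRunning()
+//     // For each received, demuxed sample buffer:
+//     await incoming.append(sampleBuffer)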
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/MediaLink.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/MediaLink.swift
new file mode 100644
index 000000000..4d83ace89
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/MediaLink.swift
@@ -0,0 +1,104 @@
+import CoreMedia
+import Foundation
+
+final actor MediaLink {
+ static let capacity = 90
+ static let duration: TimeInterval = 0.0
+
+    var dequeue: AsyncStream<CMSampleBuffer> {
+ AsyncStream { continutation in
+ self.continutation = continutation
+ }
+ }
+ private(set) var isRunning = false
+    private var storage: TypedBlockQueue<CMSampleBuffer>?
+    private var continutation: AsyncStream<CMSampleBuffer>.Continuation? {
+ didSet {
+ oldValue?.finish()
+ }
+ }
+ private var duration: TimeInterval = MediaLink.duration
+ private var presentationTimeStampOrigin: CMTime = .invalid
+ private lazy var displayLink = DisplayLinkChoreographer()
+ private weak var audioPlayer: AudioPlayerNode?
+
+ init() {
+ do {
+ storage = try .init(capacity: Self.capacity, handlers: .outputPTSSortedSampleBuffers)
+ } catch {
+ logger.error(error)
+ }
+ }
+
+ func enqueue(_ sampleBuffer: CMSampleBuffer) {
+ guard isRunning else {
+ return
+ }
+ if presentationTimeStampOrigin == .invalid {
+ presentationTimeStampOrigin = sampleBuffer.presentationTimeStamp
+ }
+ do {
+ try storage?.enqueue(sampleBuffer)
+ } catch {
+ logger.error(error)
+ }
+ }
+
+ func setAudioPlayer(_ audioPlayer: AudioPlayerNode?) {
+ self.audioPlayer = audioPlayer
+ }
+
+ private func getCurrentTime(_ timestamp: TimeInterval) async -> TimeInterval {
+ defer {
+ duration += timestamp
+ }
+ return await audioPlayer?.currentTime ?? duration
+ }
+}
+
+extension MediaLink: AsyncRunner {
+ // MARK: AsyncRunner
+ func startRunning() {
+ guard !isRunning else {
+ return
+ }
+ isRunning = true
+ duration = 0.0
+ displayLink.startRunning()
+ Task {
+ for await currentTime in displayLink.updateFrames {
+ guard let storage else {
+ continue
+ }
+ let currentTime = await getCurrentTime(currentTime.targetTimestamp - currentTime.timestamp)
+ var frameCount = 0
+ while !storage.isEmpty {
+ guard let first = storage.head else {
+ break
+ }
+ if first.presentationTimeStamp.seconds - presentationTimeStampOrigin.seconds <= currentTime {
+ continutation?.yield(first)
+ frameCount += 1
+ _ = storage.dequeue()
+ } else {
+ if 2 < frameCount {
+ logger.info("droppedFrame: \(frameCount)")
+ }
+ break
+ }
+ }
+ }
+ }
+ }
+
+ func stopRunning() {
+ guard isRunning else {
+ return
+ }
+ continutation = nil
+ displayLink.stopRunning()
+ presentationTimeStampOrigin = .invalid
+ try? storage?.reset()
+ isRunning = false
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/OutgoingStream.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/OutgoingStream.swift
new file mode 100644
index 000000000..ec254cb31
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/OutgoingStream.swift
@@ -0,0 +1,118 @@
+import AVFoundation
+import Foundation
+
+/// An object that provides a stream ingest feature.
+package final class OutgoingStream {
+ package private(set) var isRunning = false
+
+ /// The asynchronous sequence for audio output.
+ package var audioOutputStream: AsyncStream<(AVAudioBuffer, AVAudioTime)> {
+ return audioCodec.outputStream
+ }
+
+ /// Specifies the audio compression properties.
+ package var audioSettings: AudioCodecSettings {
+ get {
+ audioCodec.settings
+ }
+ set {
+ audioCodec.settings = newValue
+ }
+ }
+
+ /// The audio input format.
+ package private(set) var audioInputFormat: CMFormatDescription?
+
+ /// The asynchronous sequence for video output.
+ package var videoOutputStream: AsyncStream<CMSampleBuffer> {
+ return videoCodec.outputStream
+ }
+
+ /// Specifies the video compression properties.
+ package var videoSettings: VideoCodecSettings {
+ get {
+ videoCodec.settings
+ }
+ set {
+ videoCodec.settings = newValue
+ }
+ }
+
+ /// Specifies the video input buffer count. Values of zero or less mean unbounded buffering.
+ package var videoInputBufferCounts = -1
+
+ /// The asynchronous sequence for video input buffer.
+ package var videoInputStream: AsyncStream<CMSampleBuffer> {
+ if 0 < videoInputBufferCounts {
+ return AsyncStream(CMSampleBuffer.self, bufferingPolicy: .bufferingNewest(videoInputBufferCounts)) { continuation in
+ self.videoInputContinuation = continuation
+ }
+ } else {
+ return AsyncStream { continuation in
+ self.videoInputContinuation = continuation
+ }
+ }
+ }
+
+ /// The video input format.
+ package private(set) var videoInputFormat: CMFormatDescription?
+
+ private var audioCodec = AudioCodec()
+ private var videoCodec = VideoCodec()
+ private var videoInputContinuation: AsyncStream<CMSampleBuffer>.Continuation? {
+ didSet {
+ oldValue?.finish()
+ }
+ }
+
+ /// Create a new instance.
+ package init() {
+ }
+
+ /// Appends a sample buffer for publish.
+ package func append(_ sampleBuffer: CMSampleBuffer) {
+ switch sampleBuffer.formatDescription?.mediaType {
+ case .audio:
+ audioInputFormat = sampleBuffer.formatDescription
+ audioCodec.append(sampleBuffer)
+ case .video:
+ videoInputFormat = sampleBuffer.formatDescription
+ videoInputContinuation?.yield(sampleBuffer)
+ default:
+ break
+ }
+ }
+
+ /// Appends a sample buffer for publish.
+ package func append(_ audioBuffer: AVAudioBuffer, when: AVAudioTime) {
+ audioInputFormat = audioBuffer.format.formatDescription
+ audioCodec.append(audioBuffer, when: when)
+ }
+
+ /// Appends a video buffer.
+ package func append(video sampleBuffer: CMSampleBuffer) {
+ videoCodec.append(sampleBuffer)
+ }
+}
+
+extension OutgoingStream: Runner {
+ // MARK: Runner
+ package func startRunning() {
+ guard !isRunning else {
+ return
+ }
+ videoCodec.startRunning()
+ audioCodec.startRunning()
+ isRunning = true
+ }
+
+ package func stopRunning() {
+ guard isRunning else {
+ return
+ }
+ isRunning = false
+ videoCodec.stopRunning()
+ audioCodec.stopRunning()
+ videoInputContinuation = nil
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/SoundTransform.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/SoundTransform.swift
new file mode 100644
index 000000000..b4ae79d4d
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/SoundTransform.swift
@@ -0,0 +1,33 @@
+import AVFoundation
+import Foundation
+
+/// A structure that represents the sound volume and pan controls.
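+///
+/// A minimal usage sketch (the `stream` value is assumed to exist):
+/// ```swift
+/// let transform = SoundTransform(volume: 0.5, pan: -1.0)
+/// await stream.setSoundTransform(transform)
+/// ```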
+public struct SoundTransform: Equatable, Sendable {
+ /// The default volume.
+ public static let defaultVolume: Float = 1.0
+ /// The default panning of the sound.
+ public static let defaultPan: Float = 0
+
+ /// The volume, ranging from 0 (silent) to 1 (full volume)
+ public var volume = SoundTransform.defaultVolume
+ /// The panning of the sound
+ public var pan = SoundTransform.defaultPan
+
+ /// Creates a new instance.
+ public init(volume: Float = SoundTransform.defaultVolume, pan: Float = SoundTransform.defaultPan) {
+ self.volume = volume
+ self.pan = pan
+ }
+
+ func apply(_ playerNode: AVAudioPlayerNode?) {
+ playerNode?.volume = volume
+ playerNode?.pan = pan
+ }
+}
+
+extension SoundTransform: CustomDebugStringConvertible {
+ // MARK: CustomDebugStringConvertible
+ public var debugDescription: String {
+ Mirror(reflecting: self).debugDescription
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamBitRateStrategy.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamBitRateStrategy.swift
new file mode 100644
index 000000000..c54cea830
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamBitRateStrategy.swift
@@ -0,0 +1,73 @@
+import Foundation
+
+/// A type with a network bitrate strategy representation.
+public protocol StreamBitRateStrategy: Sendable {
+ /// The maximum video bitrate.
+ var mamimumVideoBitRate: Int { get }
+ /// The maximum audio bitrate.
+ var mamimumAudioBitRate: Int { get }
+
+ /// Adjusts the bitrate in response to a network monitor event.
+ func adjustBitrate(_ event: NetworkMonitorEvent, stream: some StreamConvertible) async
+}
+
+/// An actor that provides an algorithm focused on video bitrate control.
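+///
+/// A minimal wiring sketch (the `stream` value is assumed to exist):
+/// ```swift
+/// let strategy = StreamVideoAdaptiveBitRateStrategy(mamimumVideoBitrate: 3_000_000)
+/// await stream.setBitRateStrategy(strategy)
+/// ```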
+public final actor StreamVideoAdaptiveBitRateStrategy: StreamBitRateStrategy {
+ /// The number of consecutive healthy status events required before raising the bitrate.
+ public static let statusCountsThreshold: Int = 15
+
+ public let mamimumVideoBitRate: Int
+ public let mamimumAudioBitRate: Int = 0
+ private var sufficientBWCounts: Int = 0
+ private var zeroBytesOutPerSecondCounts: Int = 0
+
+ /// Creates a new instance.
+ public init(mamimumVideoBitrate: Int) {
+ self.mamimumVideoBitRate = mamimumVideoBitrate
+ }
+
+ public func adjustBitrate(_ event: NetworkMonitorEvent, stream: some StreamConvertible) async {
+ switch event {
+ case .status:
+ var videoSettings = await stream.videoSettings
+ if videoSettings.bitRate == mamimumVideoBitRate {
+ return
+ }
+ if Self.statusCountsThreshold <= sufficientBWCounts {
+ let incremental = mamimumVideoBitRate / 10
+ videoSettings.bitRate = min(videoSettings.bitRate + incremental, mamimumVideoBitRate)
+ try? await stream.setVideoSettings(videoSettings)
+ sufficientBWCounts = 0
+ } else {
+ sufficientBWCounts += 1
+ }
+ case .publishInsufficientBWOccured(let report):
+ sufficientBWCounts = 0
+ var videoSettings = await stream.videoSettings
+ let audioSettings = await stream.audioSettings
+ if 0 < report.currentBytesOutPerSecond {
+ let bitRate = Int(report.currentBytesOutPerSecond * 8) / (zeroBytesOutPerSecondCounts + 1)
+ videoSettings.bitRate = max(bitRate - audioSettings.bitRate, mamimumVideoBitRate / 10)
+ videoSettings.frameInterval = 0.0
+ sufficientBWCounts = 0
+ zeroBytesOutPerSecondCounts = 0
+ } else {
+ switch zeroBytesOutPerSecondCounts {
+ case 2:
+ videoSettings.frameInterval = VideoCodecSettings.frameInterval10
+ case 4:
+ videoSettings.frameInterval = VideoCodecSettings.frameInterval05
+ default:
+ break
+ }
+ try? await stream.setVideoSettings(videoSettings)
+ zeroBytesOutPerSecondCounts += 1
+ }
+ case .reset:
+ var videoSettings = await stream.videoSettings
+ zeroBytesOutPerSecondCounts = 0
+ videoSettings.bitRate = mamimumVideoBitRate
+ try? await stream.setVideoSettings(videoSettings)
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamConvertible.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamConvertible.swift
new file mode 100644
index 000000000..669543e83
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamConvertible.swift
@@ -0,0 +1,106 @@
+import AVFAudio
+import AVFoundation
+import CoreImage
+import CoreMedia
+
+/// This interface is the foundation of RTMPStream and SRTStream.
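+///
+/// A minimal usage sketch (the `stream` actor is assumed to exist):
+/// ```swift
+/// let recorder = StreamRecorder()
+/// await stream.addOutput(recorder)
+/// try? await recorder.startRecording()
+/// ```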
+public protocol StreamConvertible: Actor, MediaMixerOutput {
+ /// The current state of the stream.
+ var readyState: StreamReadyState { get }
+ /// The sound transform value control.
+ var soundTransform: SoundTransform? { get async }
+ /// The audio compression properties.
+ var audioSettings: AudioCodecSettings { get }
+ /// The video compression properties.
+ var videoSettings: VideoCodecSettings { get }
+
+ /// Sets the bitrate strategy object.
+ func setBitRateStrategy(_ bitRateStrategy: (some StreamBitRateStrategy)?)
+
+ /// Sets the audio compression properties.
+ func setAudioSettings(_ audioSettings: AudioCodecSettings) throws
+
+ /// Sets the video compression properties.
+ func setVideoSettings(_ videoSettings: VideoCodecSettings) throws
+
+ /// Sets the sound transform value control.
+ func setSoundTransform(_ soundTransform: SoundTransform) async
+
+ /// Sets the video input buffer counts.
+ func setVideoInputBufferCounts(_ videoInputBufferCounts: Int)
+
+ /// Appends a CMSampleBuffer.
+ /// - Parameters:
+ /// - sampleBuffer:The sample buffer to append.
+ func append(_ sampleBuffer: CMSampleBuffer)
+
+ /// Appends an AVAudioBuffer.
+ /// - Parameters:
+ /// - audioBuffer: The audio buffer to append.
+ /// - when: The audio time to append.
+ func append(_ audioBuffer: AVAudioBuffer, when: AVAudioTime)
+
+ /// Attaches an audio player instance for playback.
+ func attachAudioPlayer(_ audioPlayer: AudioPlayer?) async
+
+ /// Adds an output observer.
+ func addOutput(_ observer: some StreamOutput)
+
+ /// Removes an output observer.
+ func removeOutput(_ observer: some StreamOutput)
+
+ /// Dispatch a network monitor event.
+ func dispatch(_ event: NetworkMonitorEvent) async
+}
+
+package protocol _Stream: StreamConvertible {
+ var incoming: IncomingStream { get }
+ var outgoing: OutgoingStream { get }
+ var outputs: [any StreamOutput] { get set }
+ var bitRateStrategy: (any StreamBitRateStrategy)? { get set }
+}
+
+extension _Stream {
+ public var soundTransform: SoundTransform? {
+ get async {
+ await incoming.soundTransfrom
+ }
+ }
+
+ public var audioSettings: AudioCodecSettings {
+ outgoing.audioSettings
+ }
+
+ public var videoSettings: VideoCodecSettings {
+ outgoing.videoSettings
+ }
+
+ public func setBitRateStrategy(_ bitRateStrategy: (some StreamBitRateStrategy)?) {
+ self.bitRateStrategy = bitRateStrategy
+ }
+
+ public func setVideoInputBufferCounts(_ videoInputBufferCounts: Int) {
+ outgoing.videoInputBufferCounts = videoInputBufferCounts
+ }
+
+ public func setSoundTransform(_ soundTransform: SoundTransform) async {
+ await incoming.setSoundTransform(soundTransform)
+ }
+
+ public func attachAudioPlayer(_ audioPlayer: AudioPlayer?) async {
+ await incoming.attachAudioPlayer(audioPlayer)
+ }
+
+ public func addOutput(_ observer: some StreamOutput) {
+ guard !outputs.contains(where: { $0 === observer }) else {
+ return
+ }
+ outputs.append(observer)
+ }
+
+ public func removeOutput(_ observer: some StreamOutput) {
+ if let index = outputs.firstIndex(where: { $0 === observer }) {
+ outputs.remove(at: index)
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamOutput.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamOutput.swift
new file mode 100644
index 000000000..020fbab20
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamOutput.swift
@@ -0,0 +1,11 @@
+import AVFoundation
+import CoreMedia
+import Foundation
+
+/// A delegate protocol your app implements to receive capture stream output events.
+public protocol StreamOutput: AnyObject, Sendable {
+ /// Tells the receiver that an audio buffer was output.
+ func stream(_ stream: some StreamConvertible, didOutput audio: AVAudioBuffer, when: AVAudioTime)
+ /// Tells the receiver that a video buffer was output.
+ func stream(_ stream: some StreamConvertible, didOutput video: CMSampleBuffer)
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamReadyState.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamReadyState.swift
new file mode 100644
index 000000000..9eafbcc63
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamReadyState.swift
@@ -0,0 +1,15 @@
+import Foundation
+
+/// The enumeration defines the state a stream client is in.
+public enum StreamReadyState: Int, Sendable {
+ /// The stream is idling.
+ case idle
+ /// The stream has sent a request to play and is waiting for approval from the server.
+ case play
+ /// The stream is playing.
+ case playing
+ /// The stream has sent a request to publish and is waiting for approval from the server.
+ case publish
+ /// The stream is publishing.
+ case publishing
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamRecorder.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamRecorder.swift
new file mode 100644
index 000000000..f225924ce
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamRecorder.swift
@@ -0,0 +1,382 @@
+@preconcurrency import AVFoundation
+
+// MARK: -
+/// An actor that represents a video and audio recorder.
+///
+/// This actor is compatible with both StreamOutput and MediaMixerOutput. This means it can record the output from MediaMixer in addition to StreamConvertible.
+///
+/// ```swift
+/// // An example of recording MediaMixer.
+/// let recorder = StreamRecorder()
+/// let mixer = MediaMixer()
+/// mixer.addOutput(recorder)
+/// ```
+/// ```swift
+/// // An example of recording streaming.
+/// let recorder = StreamRecorder()
+/// let mixer = MediaMixer()
+/// let stream = RTMPStream()
+/// mixer.addOutput(stream)
+/// stream.addOutput(recorder)
+/// ```
+public actor StreamRecorder {
+ static let defaultPathExtension = "mp4"
+
+ /// The error domain codes.
+ public enum Error: Swift.Error {
+ /// An invalid internal state.
+ case invalidState
+ /// The specified file already exists.
+ case fileAlreadyExists(outputURL: URL)
+ /// The specified file type is not supported.
+ case notSupportedFileType(pathExtension: String)
+ /// Failed to create the AVAssetWriter.
+ case failedToCreateAssetWriter(error: any Swift.Error)
+ /// Failed to create the AVAssetWriterInput.
+ case failedToCreateAssetWriterInput(error: any Swift.Error)
+ /// Failed to append the PixelBuffer or SampleBuffer.
+ case failedToAppend(error: (any Swift.Error)?)
+ /// Failed to finish writing the AVAssetWriter.
+ case failedToFinishWriting(error: (any Swift.Error)?)
+ }
+
+ /// The default recording settings. A value of 0 means the setting is inherited from the source format.
+ public static let defaultSettings: [AVMediaType: [String: any Sendable]] = [
+ .audio: [
+ AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
+ AVSampleRateKey: 0,
+ AVNumberOfChannelsKey: 0
+ ],
+ .video: [
+ AVVideoCodecKey: AVVideoCodecType.h264,
+ AVVideoHeightKey: 0,
+ AVVideoWidthKey: 0
+ ]
+ ]
+
+ private static func isZero(_ value: any Sendable) -> Bool {
+ switch value {
+ case let value as Int:
+ return value == 0
+ case let value as Double:
+ return value == 0
+ default:
+ return false
+ }
+ }
+
+ enum SupportedFileType: String {
+ case mp4
+ case mov
+
+ var fileType: AVFileType {
+ switch self {
+ case .mp4:
+ return .mp4
+ case .mov:
+ return .mov
+ }
+ }
+ }
+
+ /// The recorder settings.
+ public private(set) var settings: [AVMediaType: [String: any Sendable]] = StreamRecorder.defaultSettings
+ /// The recording output url.
+ public var outputURL: URL? {
+ return writer?.outputURL
+ }
+ /// An asynchronous stream of recording errors.
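+ ///
+ /// Errors can be observed as they occur; a usage sketch:
+ /// ```swift
+ /// Task {
+ /// for await error in await recorder.error {
+ /// print(error)
+ /// }
+ /// }
+ /// ```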
+ public var error: AsyncStream<Error> {
+ AsyncStream { continuation in
+ self.continuation = continuation
+ }
+ }
+ /// Whether the recorder is currently recording.
+ public private(set) var isRecording = false
+ /// The movie fragment interval in seconds.
+ public private(set) var movieFragmentInterval: Double?
+ public private(set) var videoTrackId: UInt8? = UInt8.max
+ public private(set) var audioTrackId: UInt8? = UInt8.max
+
+ #if os(macOS) && !targetEnvironment(macCatalyst)
+ /// The default file save location.
+ public private(set) var moviesDirectory: URL = {
+ URL(fileURLWithPath: NSSearchPathForDirectoriesInDomains(.moviesDirectory, .userDomainMask, true)[0])
+ }()
+ #else
+ /// The default file save location.
+ public private(set) lazy var moviesDirectory: URL = {
+ URL(fileURLWithPath: NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0])
+ }()
+ #endif
+
+ private var isReadyForStartWriting: Bool {
+ guard let writer = writer else {
+ return false
+ }
+ return settings.count == writer.inputs.count
+ }
+ private var writer: AVAssetWriter?
+ private var continuation: AsyncStream<Error>.Continuation? {
+ didSet {
+ oldValue?.finish()
+ }
+ }
+ private var writerInputs: [AVMediaType: AVAssetWriterInput] = [:]
+ private var audioPresentationTime: CMTime = .zero
+ private var videoPresentationTime: CMTime = .zero
+ private var dimensions: CMVideoDimensions = .init(width: 0, height: 0)
+
+ /// Creates a new recorder.
+ public init() {
+ }
+
+ /// Sets the movie fragment interval in sec.
+ ///
+ /// This value allows the file to be written continuously, so the file will remain even if the app crashes or is forcefully terminated. A value of 10 seconds or more is recommended.
+ /// - seealso: https://developer.apple.com/documentation/avfoundation/avassetwriter/1387469-moviefragmentinterval
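+ ///
+ /// For example:
+ /// ```swift
+ /// await recorder.setMovieFragmentInterval(10.0)
+ /// ```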
+ public func setMovieFragmentInterval(_ movieFragmentInterval: Double?) {
+ if let movieFragmentInterval {
+ self.movieFragmentInterval = max(10.0, movieFragmentInterval)
+ } else {
+ self.movieFragmentInterval = nil
+ }
+ }
+
+ /// Starts recording.
+ ///
+ /// For iOS, if the URL is unspecified, the file will be saved in .documentDirectory. You can specify a folder of your choice, but please use an absolute path.
+ ///
+ /// ```
+ /// try? await recorder.startRecording(nil)
+ /// // -> $documentDirectory/B644F60F-0959-4F54-9D14-7F9949E02AD8.mp4
+ ///
+ /// try? await recorder.startRecording(URL(string: "dir/sample.mp4"))
+ /// // -> $documentDirectory/dir/sample.mp4
+ ///
+ /// try? await recorder.startRecording(await recorder.moviesDirectory.appendingPathComponent("sample.mp4"))
+ /// // -> $documentDirectory/sample.mp4
+ ///
+ /// try? await recorder.startRecording(URL(string: "dir"))
+ /// // -> $documentDirectory/dir/33FA7D32-E0A8-4E2C-9980-B54B60654044.mp4
+ /// ```
+ ///
+ /// - Note: Folders are not created automatically, so it’s expected that the target directory is created in advance.
+ /// - Parameters:
+ /// - url: The file path for recording. If nil is specified, a unique file path will be returned automatically.
+ /// - settings: Settings for recording.
+ /// - Throws: `Error.fileAlreadyExists` when the file already exists.
+ /// - Throws: `Error.notSupportedFileType` when an unsupported format is specified.
+ public func startRecording(_ url: URL? = nil, settings: [AVMediaType: [String: any Sendable]] = StreamRecorder.defaultSettings) async throws {
+ guard !isRecording else {
+ throw Error.invalidState
+ }
+
+ let outputURL = makeOutputURL(url)
+ if FileManager.default.fileExists(atPath: outputURL.path) {
+ throw Error.fileAlreadyExists(outputURL: outputURL)
+ }
+
+ var fileType: AVFileType = .mp4
+ if let supportedFileType = SupportedFileType(rawValue: outputURL.pathExtension) {
+ fileType = supportedFileType.fileType
+ } else {
+ throw Error.notSupportedFileType(pathExtension: outputURL.pathExtension)
+ }
+
+ writer = try AVAssetWriter(outputURL: outputURL, fileType: fileType)
+ if let movieFragmentInterval {
+ writer?.movieFragmentInterval = CMTime(seconds: movieFragmentInterval, preferredTimescale: 1)
+ }
+ videoPresentationTime = .zero
+ audioPresentationTime = .zero
+ self.settings = settings
+
+ isRecording = true
+ }
+
+ /// Stops recording.
+ ///
+ /// ## Example of saving to the Photos app.
+ /// ```
+ /// do {
+ /// let outputURL = try await recorder.stopRecording()
+ /// PHPhotoLibrary.shared().performChanges({() -> Void in
+ /// PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL)
+ /// }, completionHandler: { _, error -> Void in
+ /// try? FileManager.default.removeItem(at: outputURL)
+ /// })
+ /// } catch {
+ /// print(error)
+ /// }
+ /// ```
+ public func stopRecording() async throws -> URL {
+ guard isRecording else {
+ throw Error.invalidState
+ }
+ defer {
+ isRecording = false
+ continuation = nil
+ self.writer = nil
+ self.writerInputs.removeAll()
+ }
+ guard let writer = writer, writer.status == .writing else {
+ throw Error.failedToFinishWriting(error: writer?.error)
+ }
+ for (_, input) in writerInputs {
+ input.markAsFinished()
+ }
+ await writer.finishWriting()
+ return writer.outputURL
+ }
+
+ public func selectTrack(_ id: UInt8?, mediaType: CMFormatDescription.MediaType) {
+ switch mediaType {
+ case .audio:
+ audioTrackId = id
+ case .video:
+ videoTrackId = id
+ default:
+ break
+ }
+ }
+
+ private func makeOutputURL(_ url: URL?) -> URL {
+ guard let url else {
+ return moviesDirectory.appendingPathComponent(UUID().uuidString).appendingPathExtension(Self.defaultPathExtension)
+ }
+ // AVAssetWriter requires a file URL (isFileURL == true).
+ guard url.isFileURL else {
+ return url.pathExtension.isEmpty ?
+ moviesDirectory.appendingPathComponent(url.path).appendingPathComponent(UUID().uuidString).appendingPathExtension(Self.defaultPathExtension) :
+ moviesDirectory.appendingPathComponent(url.path)
+ }
+ return url.pathExtension.isEmpty ? url.appendingPathComponent(UUID().uuidString).appendingPathExtension(Self.defaultPathExtension) : url
+ }
+
+ private func append(_ sampleBuffer: CMSampleBuffer) {
+ guard isRecording else {
+ return
+ }
+ let mediaType: AVMediaType = (sampleBuffer.formatDescription?.mediaType == .video) ? .video : .audio
+ guard
+ let writer,
+ let input = makeWriterInput(mediaType, sourceFormatHint: sampleBuffer.formatDescription),
+ isReadyForStartWriting else {
+ return
+ }
+
+ switch writer.status {
+ case .unknown:
+ writer.startWriting()
+ writer.startSession(atSourceTime: sampleBuffer.presentationTimeStamp)
+ default:
+ break
+ }
+
+ if input.isReadyForMoreMediaData {
+ switch mediaType {
+ case .audio:
+ if input.append(sampleBuffer) {
+ audioPresentationTime = sampleBuffer.presentationTimeStamp
+ } else {
+ continuation?.yield(Error.failedToAppend(error: writer.error))
+ }
+ case .video:
+ if input.append(sampleBuffer) {
+ videoPresentationTime = sampleBuffer.presentationTimeStamp
+ } else {
+ continuation?.yield(Error.failedToAppend(error: writer.error))
+ }
+ default:
+ break
+ }
+ }
+ }
+
+ private func makeWriterInput(_ mediaType: AVMediaType, sourceFormatHint: CMFormatDescription?) -> AVAssetWriterInput? {
+ guard writerInputs[mediaType] == nil else {
+ return writerInputs[mediaType]
+ }
+
+ var outputSettings: [String: Any] = [:]
+ if let settings = self.settings[mediaType] {
+ switch mediaType {
+ case .audio:
+ guard
+ let format = sourceFormatHint,
+ let inSourceFormat = format.audioStreamBasicDescription else {
+ break
+ }
+ for (key, value) in settings {
+ switch key {
+ case AVSampleRateKey:
+ outputSettings[key] = Self.isZero(value) ? inSourceFormat.mSampleRate : value
+ case AVNumberOfChannelsKey:
+ outputSettings[key] = Self.isZero(value) ? Int(inSourceFormat.mChannelsPerFrame) : value
+ default:
+ outputSettings[key] = value
+ }
+ }
+ case .video:
+ dimensions = sourceFormatHint?.dimensions ?? .init(width: 0, height: 0)
+ for (key, value) in settings {
+ switch key {
+ case AVVideoHeightKey:
+ outputSettings[key] = Self.isZero(value) ? Int(dimensions.height) : value
+ case AVVideoWidthKey:
+ outputSettings[key] = Self.isZero(value) ? Int(dimensions.width) : value
+ default:
+ outputSettings[key] = value
+ }
+ }
+ default:
+ break
+ }
+ }
+
+ var input: AVAssetWriterInput?
+ if writer?.canApply(outputSettings: outputSettings, forMediaType: mediaType) == true {
+ input = AVAssetWriterInput(mediaType: mediaType, outputSettings: outputSettings, sourceFormatHint: sourceFormatHint)
+ input?.expectsMediaDataInRealTime = true
+ self.writerInputs[mediaType] = input
+ if let input {
+ self.writer?.add(input)
+ }
+ }
+
+ return input
+ }
+}
+
+extension StreamRecorder: StreamOutput {
+ // MARK: StreamOutput
+ nonisolated public func stream(_ stream: some StreamConvertible, didOutput video: CMSampleBuffer) {
+ Task { await append(video) }
+ }
+
+ nonisolated public func stream(_ stream: some StreamConvertible, didOutput audio: AVAudioBuffer, when: AVAudioTime) {
+ guard let sampleBuffer = (audio as? AVAudioPCMBuffer)?.makeSampleBuffer(when) else {
+ return
+ }
+ Task { await append(sampleBuffer) }
+ }
+}
+
+extension StreamRecorder: MediaMixerOutput {
+ // MARK: MediaMixerOutput
+ nonisolated public func mixer(_ mixer: MediaMixer, didOutput sampleBuffer: CMSampleBuffer) {
+ Task {
+ await append(sampleBuffer)
+ }
+ }
+
+ nonisolated public func mixer(_ mixer: MediaMixer, didOutput buffer: AVAudioPCMBuffer, when: AVAudioTime) {
+ guard let sampleBuffer = buffer.makeSampleBuffer(when) else {
+ return
+ }
+ Task {
+ await append(sampleBuffer)
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/AVAudioUtil.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/AVAudioUtil.swift
new file mode 100644
index 000000000..c1fadcf1c
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/AVAudioUtil.swift
@@ -0,0 +1,52 @@
+import AVFAudio
+import Foundation
+
+enum AVAudioUtil {
+ static func makeAudioFormat(_ formatDescription: CMFormatDescription?) -> AVAudioFormat? {
+ guard var inSourceFormat = formatDescription?.audioStreamBasicDescription else {
+ return nil
+ }
+ if inSourceFormat.mFormatID == kAudioFormatLinearPCM && kLinearPCMFormatFlagIsBigEndian == (inSourceFormat.mFormatFlags & kLinearPCMFormatFlagIsBigEndian) {
+ let interleaved = !((inSourceFormat.mFormatFlags & kLinearPCMFormatFlagIsNonInterleaved) == kLinearPCMFormatFlagIsNonInterleaved)
+ if let channelLayout = Self.makeChannelLayout(inSourceFormat.mChannelsPerFrame) {
+ return .init(
+ commonFormat: .pcmFormatInt16,
+ sampleRate: inSourceFormat.mSampleRate,
+ interleaved: interleaved,
+ channelLayout: channelLayout
+ )
+ }
+ return .init(
+ commonFormat: .pcmFormatInt16,
+ sampleRate: inSourceFormat.mSampleRate,
+ channels: inSourceFormat.mChannelsPerFrame,
+ interleaved: interleaved
+ )
+ }
+ if let layout = Self.makeChannelLayout(inSourceFormat.mChannelsPerFrame) {
+ return .init(streamDescription: &inSourceFormat, channelLayout: layout)
+ }
+ return .init(streamDescription: &inSourceFormat)
+ }
+
+ static func makeChannelLayout(_ numberOfChannels: UInt32) -> AVAudioChannelLayout? {
+ guard 2 < numberOfChannels else {
+ return nil
+ }
+ switch numberOfChannels {
+ case 3:
+ // https://github.com/shogo4405/HaishinKit.swift/issues/1444
+ return AVAudioChannelLayout(layoutTag: kAudioChannelLayoutTag_MPEG_3_0_B)
+ case 4:
+ return AVAudioChannelLayout(layoutTag: kAudioChannelLayoutTag_AudioUnit_4)
+ case 5:
+ return AVAudioChannelLayout(layoutTag: kAudioChannelLayoutTag_AudioUnit_5)
+ case 6:
+ return AVAudioChannelLayout(layoutTag: kAudioChannelLayoutTag_AudioUnit_6)
+ case 8:
+ return AVAudioChannelLayout(layoutTag: kAudioChannelLayoutTag_AudioUnit_8)
+ default:
+ return AVAudioChannelLayout(layoutTag: kAudioChannelLayoutTag_DiscreteInOrder | numberOfChannels)
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/AsyncStreamed.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/AsyncStreamed.swift
new file mode 100644
index 000000000..7f8957a74
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/AsyncStreamed.swift
@@ -0,0 +1,32 @@
+import Foundation
+
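+/// A property wrapper that exposes a value as an AsyncStream of its changes.
+///
+/// A minimal sketch (all names are illustrative):
+/// ```swift
+/// struct Counter {
+/// @AsyncStreamed(0) var values: AsyncStream<Int>
+/// mutating func increment() { _values.value += 1 }
+/// }
+/// ```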
+@propertyWrapper
+package struct AsyncStreamed<T: Equatable & Sendable> {
+ package var wrappedValue: AsyncStream<T> {
+ get {
+ defer {
+ continuation.yield(value)
+ }
+ return stream
+ }
+ @available(*, unavailable)
+ set { _ = newValue }
+ }
+ package var value: T {
+ didSet {
+ guard value != oldValue else {
+ return
+ }
+ continuation.yield(value)
+ }
+ }
+ private let stream: AsyncStream<T>
+ private let continuation: AsyncStream<T>.Continuation
+
+ package init(_ value: T, bufferingPolicy limit: AsyncStream<T>.Continuation.BufferingPolicy = .unbounded) {
+ let (stream, continuation) = AsyncStream.makeStream(of: T.self, bufferingPolicy: limit)
+ self.value = value
+ self.stream = stream
+ self.continuation = continuation
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/AsyncStreamedFlow.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/AsyncStreamedFlow.swift
new file mode 100644
index 000000000..ee5a181a4
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/AsyncStreamedFlow.swift
@@ -0,0 +1,32 @@
+import Foundation
+
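+/// A property wrapper that hands out a fresh AsyncStream on each read and lets its owner push values into it.
+///
+/// A minimal sketch (all names are illustrative):
+/// ```swift
+/// struct EventSource {
+/// @AsyncStreamedFlow() var events: AsyncStream<String>
+/// func emit(_ message: String) { _events.yield(message) }
+/// }
+/// ```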
+@propertyWrapper
+package struct AsyncStreamedFlow<T: Sendable> {
+ package var wrappedValue: AsyncStream<T> {
+ mutating get {
+ let (stream, continuation) = AsyncStream.makeStream(of: T.self, bufferingPolicy: bufferingPolicy)
+ self.continuation = continuation
+ return stream
+ }
+ @available(*, unavailable)
+ set { _ = newValue }
+ }
+ private let bufferingPolicy: AsyncStream<T>.Continuation.BufferingPolicy
+ private var continuation: AsyncStream<T>.Continuation? {
+ didSet {
+ oldValue?.finish()
+ }
+ }
+
+ package init(_ bufferingPolicy: AsyncStream.Continuation.BufferingPolicy = .unbounded) {
+ self.bufferingPolicy = bufferingPolicy
+ }
+
+ package func yield(_ value: T) {
+ continuation?.yield(value)
+ }
+
+ package mutating func finish() {
+ continuation = nil
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/AudioTime.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/AudioTime.swift
new file mode 100644
index 000000000..8c8e8300c
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/AudioTime.swift
@@ -0,0 +1,55 @@
+import AVFoundation
+import Foundation
+
+/// A helper class for interoperating between AVAudioTime and CMTime.
+/// Without a hostTime on the AVAudioTime side, conversion fails and the result cannot be saved with AVAssetWriter.
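+///
+/// A typical flow (sketch; `pts` and the sample rate are assumptions):
+/// ```swift
+/// let time = AudioTime()
+/// time.anchor(pts, sampleRate: 48_000)
+/// time.advanced(1024) // frames appended so far
+/// let when = time.at
+/// ```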
+final class AudioTime {
+ var at: AVAudioTime {
+ let now = AVAudioTime(sampleTime: sampleTime, atRate: sampleRate)
+ guard let anchorTime else {
+ return now
+ }
+ return now.extrapolateTime(fromAnchor: anchorTime) ?? now
+ }
+
+ var hasAnchor: Bool {
+ return anchorTime != nil
+ }
+
+ private var sampleRate: Double = 0.0
+ private var anchorTime: AVAudioTime?
+ private var sampleTime: AVAudioFramePosition = 0
+
+ func advanced(_ count: AVAudioFramePosition) {
+ sampleTime += count
+ }
+
+ func anchor(_ time: CMTime, sampleRate: Double) {
+ guard anchorTime == nil else {
+ return
+ }
+ self.sampleRate = sampleRate
+ if time.timescale == Int32(sampleRate) {
+ sampleTime = time.value
+ } else {
+ // ReplayKit .appAudio
+ sampleTime = Int64(Double(time.value) * sampleRate / Double(time.timescale))
+ }
+ anchorTime = .init(hostTime: AVAudioTime.hostTime(forSeconds: time.seconds), sampleTime: sampleTime, atRate: sampleRate)
+ }
+
+ func anchor(_ time: AVAudioTime) {
+ guard anchorTime == nil else {
+ return
+ }
+ sampleRate = time.sampleRate
+ sampleTime = time.sampleTime
+ anchorTime = time
+ }
+
+ func reset() {
+ sampleRate = 0.0
+ sampleTime = 0
+ anchorTime = nil
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/ByteArray.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/ByteArray.swift
new file mode 100644
index 000000000..a6aa59403
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/ByteArray.swift
@@ -0,0 +1,398 @@
+import Foundation
+
+protocol ByteArrayConvertible {
+ var data: Data { get }
+ var length: Int { get set }
+ var position: Int { get set }
+ var bytesAvailable: Int { get }
+
+ subscript(i: Int) -> UInt8 { get set }
+
+ @discardableResult
+ func writeUInt8(_ value: UInt8) -> Self
+ func readUInt8() throws -> UInt8
+
+ @discardableResult
+ func writeInt8(_ value: Int8) -> Self
+ func readInt8() throws -> Int8
+
+ @discardableResult
+ func writeUInt16(_ value: UInt16) -> Self
+ func readUInt16() throws -> UInt16
+
+ @discardableResult
+ func writeInt16(_ value: Int16) -> Self
+ func readInt16() throws -> Int16
+
+ @discardableResult
+ func writeUInt24(_ value: UInt32) -> Self
+ func readUInt24() throws -> UInt32
+
+ @discardableResult
+ func writeUInt32(_ value: UInt32) -> Self
+ func readUInt32() throws -> UInt32
+
+ @discardableResult
+ func writeInt32(_ value: Int32) -> Self
+ func readInt32() throws -> Int32
+
+ @discardableResult
+ func writeUInt64(_ value: UInt64) -> Self
+ func readUInt64() throws -> UInt64
+
+ @discardableResult
+ func writeInt64(_ value: Int64) -> Self
+ func readInt64() throws -> Int64
+
+ @discardableResult
+ func writeDouble(_ value: Double) -> Self
+ func readDouble() throws -> Double
+
+ @discardableResult
+ func writeFloat(_ value: Float) -> Self
+ func readFloat() throws -> Float
+
+ @discardableResult
+ func writeUTF8(_ value: String) throws -> Self
+ func readUTF8() throws -> String
+
+ @discardableResult
+ func writeUTF8Bytes(_ value: String) -> Self
+ func readUTF8Bytes(_ length: Int) throws -> String
+
+ @discardableResult
+ func writeBytes(_ value: Data) -> Self
+ func readBytes(_ length: Int) throws -> Data
+
+ @discardableResult
+ func clear() -> Self
+}
+
+// MARK: -
+/**
+ * The ByteArray class provides methods and properties for reading and writing binary data.
+ */
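+/// A round-trip sketch:
+/// ```swift
+/// let buffer = ByteArray()
+/// buffer.writeUInt16(0x1234).writeUTF8Bytes("abc")
+/// buffer.position = 0
+/// let value = try? buffer.readUInt16() // 0x1234
+/// ```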
+class ByteArray: ByteArrayConvertible {
+ static let fillZero: [UInt8] = [0x00]
+
+ static let sizeOfInt8: Int = 1
+ static let sizeOfInt16: Int = 2
+ static let sizeOfInt24: Int = 3
+ static let sizeOfInt32: Int = 4
+ static let sizeOfFloat: Int = 4
+ static let sizeOfInt64: Int = 8
+ static let sizeOfDouble: Int = 8
+
+ /**
+ * The ByteArray error domain codes.
+ */
+ enum Error: Swift.Error {
+ /// Error cause end of data.
+ case eof
+ /// Failed to parse
+ case parse
+ }
+
+ /// Creates an empty ByteArray.
+ init() {
+ }
+
+ /// Creates a ByteArray with data.
+ init(data: Data) {
+ self.data = data
+ }
+
+ private(set) var data = Data()
+
+ /// Specifies the length of buffer.
+ var length: Int {
+ get {
+ data.count
+ }
+ set {
+ switch true {
+ case (data.count < newValue):
+ data.append(Data(count: newValue - data.count))
+ case (newValue < data.count):
+ data = data.subdata(in: 0..<newValue)
+ default:
+ break
+ }
+ }
+ }
+
+ /// Specifies the position of buffer.
+ var position: Int = 0
+
+ /// The number of bytes available for reading.
+ var bytesAvailable: Int {
+ data.count - position
+ }
+
+ /// The subscript.
+ subscript(i: Int) -> UInt8 {
+ get {
+ data[i]
+ }
+ set {
+ data[i] = newValue
+ }
+ }
+
+ /// Reading an UInt8 value.
+ func readUInt8() throws -> UInt8 {
+ guard ByteArray.sizeOfInt8 <= bytesAvailable else {
+ throw ByteArray.Error.eof
+ }
+ defer {
+ position += 1
+ }
+ return data[position]
+ }
+
+ /// Writing an UInt8 value.
+ @discardableResult
+ func writeUInt8(_ value: UInt8) -> Self {
+ writeBytes(value.data)
+ }
+
+ /// Reading an Int8 value.
+ func readInt8() throws -> Int8 {
+ guard ByteArray.sizeOfInt8 <= bytesAvailable else {
+ throw ByteArray.Error.eof
+ }
+ defer {
+ position += 1
+ }
+ return Int8(bitPattern: UInt8(data[position]))
+ }
+
+ /// Writing an Int8 value.
+ @discardableResult
+ func writeInt8(_ value: Int8) -> Self {
+ writeBytes(UInt8(bitPattern: value).data)
+ }
+
+ /// Reading an UInt16 value.
+ func readUInt16() throws -> UInt16 {
+ guard ByteArray.sizeOfInt16 <= bytesAvailable else {
+ throw ByteArray.Error.eof
+ }
+ position += ByteArray.sizeOfInt16
+ return UInt16(data: data[position - ByteArray.sizeOfInt16..<position]).bigEndian
+ }
+
+ /// Writing an UInt16 value.
+ @discardableResult
+ func writeUInt16(_ value: UInt16) -> Self {
+ writeBytes(value.bigEndian.data)
+ }
+
+ /// Reading an Int16 value.
+ func readInt16() throws -> Int16 {
+ guard ByteArray.sizeOfInt16 <= bytesAvailable else {
+ throw ByteArray.Error.eof
+ }
+ position += ByteArray.sizeOfInt16
+ return Int16(data: data[position - ByteArray.sizeOfInt16..<position]).bigEndian
+ }
+
+ /// Writing an Int16 value.
+ @discardableResult
+ func writeInt16(_ value: Int16) -> Self {
+ writeBytes(value.bigEndian.data)
+ }
+
+ /// Reading an UInt24 value.
+ func readUInt24() throws -> UInt32 {
+ guard ByteArray.sizeOfInt24 <= bytesAvailable else {
+ throw ByteArray.Error.eof
+ }
+ position += ByteArray.sizeOfInt24
+ return UInt32(data: ByteArray.fillZero + data[position - ByteArray.sizeOfInt24..<position]).bigEndian
+ }
+
+ /// Writing an UInt24 value.
+ @discardableResult
+ func writeUInt24(_ value: UInt32) -> Self {
+ writeBytes(value.bigEndian.data.subdata(in: 1..<ByteArray.sizeOfInt32))
+ }
+
+ /// Reading an UInt32 value.
+ func readUInt32() throws -> UInt32 {
+ guard ByteArray.sizeOfInt32 <= bytesAvailable else {
+ throw ByteArray.Error.eof
+ }
+ position += ByteArray.sizeOfInt32
+ return UInt32(data: data[position - ByteArray.sizeOfInt32..<position]).bigEndian
+ }
+
+ /// Writing an UInt32 value.
+ @discardableResult
+ func writeUInt32(_ value: UInt32) -> Self {
+ writeBytes(value.bigEndian.data)
+ }
+
+ /// Reading an Int32 value.
+ func readInt32() throws -> Int32 {
+ guard ByteArray.sizeOfInt32 <= bytesAvailable else {
+ throw ByteArray.Error.eof
+ }
+ position += ByteArray.sizeOfInt32
+ return Int32(data: data[position - ByteArray.sizeOfInt32..<position]).bigEndian
+ }
+
+ /// Writing an Int32 value.
+ @discardableResult
+ func writeInt32(_ value: Int32) -> Self {
+ writeBytes(value.bigEndian.data)
+ }
+
+ /// Writing an UInt64 value.
+ @discardableResult
+ func writeUInt64(_ value: UInt64) -> Self {
+ writeBytes(value.bigEndian.data)
+ }
+
+ /// Reading an UInt64 value.
+ func readUInt64() throws -> UInt64 {
+ guard ByteArray.sizeOfInt64 <= bytesAvailable else {
+ throw ByteArray.Error.eof
+ }
+ position += ByteArray.sizeOfInt64
+ return UInt64(data: data[position - ByteArray.sizeOfInt64..<position]).bigEndian
+ }
+
+ /// Writing an Int64 value.
+ @discardableResult
+ func writeInt64(_ value: Int64) -> Self {
+ writeBytes(value.bigEndian.data)
+ }
+
+ /// Reading an Int64 value.
+ func readInt64() throws -> Int64 {
+ guard ByteArray.sizeOfInt64 <= bytesAvailable else {
+ throw ByteArray.Error.eof
+ }
+ position += ByteArray.sizeOfInt64
+ return Int64(data: data[position - ByteArray.sizeOfInt64..<position]).bigEndian
+ }
+
+ /// Reading a Double value.
+ func readDouble() throws -> Double {
+ guard ByteArray.sizeOfDouble <= bytesAvailable else {
+ throw ByteArray.Error.eof
+ }
+ position += ByteArray.sizeOfDouble
+ return Double(data: Data(data.subdata(in: position - ByteArray.sizeOfDouble..<position).reversed()))
+ }
+
+ /// Writing a Double value.
+ @discardableResult
+ func writeDouble(_ value: Double) -> Self {
+ writeBytes(Data(value.data.reversed()))
+ }
+
+ /// Reading a Float value.
+ func readFloat() throws -> Float {
+ guard ByteArray.sizeOfFloat <= bytesAvailable else {
+ throw ByteArray.Error.eof
+ }
+ position += ByteArray.sizeOfFloat
+ return Float(data: Data(data.subdata(in: position - ByteArray.sizeOfFloat..<position).reversed()))
+ }
+
+ /// Writing a Float value.
+ @discardableResult
+ func writeFloat(_ value: Float) -> Self {
+ writeBytes(Data(value.data.reversed()))
+ }
+
+ /// Reading a string as UTF8 value.
+ func readUTF8() throws -> String {
+ try readUTF8Bytes(Int(try readUInt16()))
+ }
+
+ /// Writing a string as UTF8 value.
+ @discardableResult
+ func writeUTF8(_ value: String) throws -> Self {
+ let utf8 = Data(value.utf8)
+ return writeUInt16(UInt16(utf8.count)).writeBytes(utf8)
+ }
+
+ /// Clear the buffer.
+ @discardableResult
+ func clear() -> Self {
+ position = 0
+ data.removeAll()
+ return self
+ }
+
+ func readUTF8Bytes(_ length: Int) throws -> String {
+ guard length <= bytesAvailable else {
+ throw ByteArray.Error.eof
+ }
+ position += length
+
+ guard let result = String(data: data.subdata(in: position - length..<position), encoding: .utf8) else {
+ throw ByteArray.Error.parse
+ }
+ return result
+ }
+
+ @discardableResult
+ func writeUTF8Bytes(_ value: String) -> Self {
+ writeBytes(Data(value.utf8))
+ }
+
+ func readBytes(_ length: Int) throws -> Data {
+ guard length <= bytesAvailable else {
+ throw ByteArray.Error.eof
+ }
+ position += length
+ return data.subdata(in: position - length..<position)
+ }
+
+ @discardableResult
+ func writeBytes(_ value: Data) -> Self {
+ if position == data.count {
+ data.append(value)
+ position = data.count
+ return self
+ }
+ let length: Int = min(data.count, value.count)
+ data[position..<(position + length)] = value.subdata(in: 0..<length)
+ if length == data.count {
+ data.append(value.subdata(in: length..<value.count))
+ }
+ position += value.count
+ return self
+ }
+
+ func sequence(_ length: Int, lambda: ((ByteArray) -> Void)) {
+ let r: Int = (data.count - position) % length
+ for index in stride(from: data.startIndex.advanced(by: position), to: data.endIndex.advanced(by: -r), by: length) {
+ lambda(ByteArray(data: data.subdata(in: index..<index.advanced(by: length))))
+ }
+ if 0 < r {
+ lambda(ByteArray(data: data.subdata(in: data.endIndex.advanced(by: -r)..<data.endIndex)))
+ }
+ }
+
+ func toUInt32() -> [UInt32] {
+ let size: Int = MemoryLayout<UInt32>.size
+ if (data.endIndex - position) % size != 0 {
+ return []
+ }
+ var result: [UInt32] = []
+ for index in stride(from: data.startIndex.advanced(by: position), to: data.endIndex, by: size) {
+ result.append(UInt32(data: data[index..<index.advanced(by: size)]))
+ }
+ return result
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/DeviceUtil.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/DeviceUtil.swift
new file mode 100644
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/DeviceUtil.swift
+#if os(iOS)
+import AVFoundation
+import Foundation
+import UIKit
+
+/// The utility object for device information.
+public enum DeviceUtil {
+ /// Looks up the AVCaptureVideoOrientation by a Notification.
+ public static func videoOrientation(by notification: Notification) -> AVCaptureVideoOrientation? {
+ guard let device = notification.object as? UIDevice else {
+ return nil
+ }
+ return videoOrientation(by: device.orientation)
+ }
+
+ /// Looks up the AVCaptureVideoOrientation by an UIDeviceOrientation.
+ public static func videoOrientation(by orientation: UIDeviceOrientation) -> AVCaptureVideoOrientation? {
+ switch orientation {
+ case .portrait:
+ return .portrait
+ case .portraitUpsideDown:
+ return .portraitUpsideDown
+ case .landscapeLeft:
+ return .landscapeRight
+ case .landscapeRight:
+ return .landscapeLeft
+ default:
+ return nil
+ }
+ }
+
+ /// Looks up the AVCaptureVideoOrientation by an UIInterfaceOrientation.
+ public static func videoOrientation(by orientation: UIInterfaceOrientation) -> AVCaptureVideoOrientation? {
+ switch orientation {
+ case .portrait:
+ return .portrait
+ case .portraitUpsideDown:
+ return .portraitUpsideDown
+ case .landscapeLeft:
+ return .landscapeLeft
+ case .landscapeRight:
+ return .landscapeRight
+ default:
+ return nil
+ }
+ }
+
+ /// Whether a headphone is connected to the device.
+ public static func isHeadphoneConnected(_ ports: Set<AVAudioSession.Port> = [.headphones, .bluetoothLE, .bluetoothHFP, .bluetoothA2DP]) -> Bool {
+ let outputs = AVAudioSession.sharedInstance().currentRoute.outputs
+ for description in outputs where ports.contains(description.portType) {
+ return true
+ }
+ return false
+ }
+
+ /// Whether a headphone was disconnected from the device.
+ public static func isHeadphoneDisconnected(_ notification: Notification, ports: Set<AVAudioSession.Port> = [.headphones, .bluetoothLE, .bluetoothHFP, .bluetoothA2DP]) -> Bool {
+ guard let previousRoute = notification.userInfo?[AVAudioSessionRouteChangePreviousRouteKey] as? AVAudioSessionRouteDescription else {
+ return false
+ }
+ var isHeadphoneConnected = false
+ for output in previousRoute.outputs where ports.contains(output.portType) {
+ isHeadphoneConnected = true
+ break
+ }
+ if !isHeadphoneConnected {
+ return false
+ }
+ return !DeviceUtil.isHeadphoneConnected(ports)
+ }
+}
+#endif
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/FrameTracker.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/FrameTracker.swift
new file mode 100644
index 000000000..902eaacbf
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/FrameTracker.swift
@@ -0,0 +1,26 @@
+import CoreMedia
+
+struct FrameTracker {
+ static let seconds = 1.0
+
+ private(set) var frameRate: Int = 0
+ private var count = 0
+ private var rotated: CMTime = .zero
+
+ init() {
+ }
+
+ mutating func update(_ time: CMTime) {
+ count += 1
+ if Self.seconds <= (time - rotated).seconds {
+ rotated = time
+ frameRate = count
+ count = 0
+ }
+ }
+
+ mutating func clear() {
+ count = 0
+ rotated = .zero
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/Runner.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/Runner.swift
new file mode 100644
index 000000000..f778c3e5b
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/Runner.swift
@@ -0,0 +1,21 @@
+import Foundation
+
+/// A type with methods for running.
+public protocol Runner: AnyObject {
+ /// Indicates whether the receiver is running.
+ var isRunning: Bool { get }
+ /// Tells the receiver to start running.
+ func startRunning()
+ /// Tells the receiver to stop running.
+ func stopRunning()
+}
+
+/// A type with methods for running.
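+///
+/// A minimal conformance sketch (the `Ticker` actor is hypothetical):
+/// ```swift
+/// actor Ticker: AsyncRunner {
+/// private(set) var isRunning = false
+/// func startRunning() { isRunning = true }
+/// func stopRunning() { isRunning = false }
+/// }
+/// ```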
+public protocol AsyncRunner: Actor {
+ /// Indicates whether the receiver is running.
+ var isRunning: Bool { get }
+ /// Tells the receiver to start running.
+ func startRunning() async
+ /// Tells the receiver to stop running.
+ func stopRunning() async
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/TypedBlockQueue.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/TypedBlockQueue.swift
new file mode 100644
index 000000000..8744b8723
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/TypedBlockQueue.swift
@@ -0,0 +1,65 @@
+import CoreMedia
+import Foundation
+
+final class TypedBlockQueue<T: CMBuffer> {
+ private let queue: CMBufferQueue
+ private let capacity: CMItemCount
+
+ @inlinable @inline(__always) var head: T? {
+ guard let head = queue.head else {
+ return nil
+ }
+ return (head as! T)
+ }
+
+ @inlinable @inline(__always) var isEmpty: Bool {
+ queue.isEmpty
+ }
+
+ @inlinable @inline(__always) var duration: CMTime {
+ queue.duration
+ }
+
+ init(capacity: CMItemCount, handlers: CMBufferQueue.Handlers) throws {
+ self.capacity = capacity
+ self.queue = try CMBufferQueue(capacity: capacity, handlers: handlers)
+ }
+
+ @inlinable
+ @inline(__always)
+ func enqueue(_ buffer: T) throws {
+ try queue.enqueue(buffer)
+ }
+
+ @inlinable
+ @inline(__always)
+ func dequeue() -> T? {
+ guard let value = queue.dequeue() else {
+ return nil
+ }
+ return (value as! T)
+ }
+
+ @inlinable
+ @inline(__always)
+ func reset() throws {
+ try queue.reset()
+ }
+}
+
+extension TypedBlockQueue where T == CMSampleBuffer {
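+ /// Dequeues buffers up to and including the given presentation timestamp,
+ /// returning the latest one that is not later than it.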
+ func dequeue(_ presentationTimeStamp: CMTime) -> CMSampleBuffer? {
+ var result: CMSampleBuffer?
+ while !queue.isEmpty {
+ guard let head else {
+ break
+ }
+ if head.presentationTimeStamp <= presentationTimeStamp {
+ result = dequeue()
+ } else {
+ return result
+ }
+ }
+ return result
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/View/MTHKView.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/View/MTHKView.swift
new file mode 100644
index 000000000..66dfa89ae
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/View/MTHKView.swift
@@ -0,0 +1,174 @@
+#if os(iOS) || os(tvOS) || os(macOS)
+
+import AVFoundation
+import MetalKit
+
+/// A view that displays the video content of a stream using the Metal API.
+public class MTHKView: MTKView {
+ /// Specifies how the video is displayed within a player layer’s bounds.
+ public var videoGravity: AVLayerVideoGravity = .resizeAspect
+ public var videoTrackId: UInt8? = UInt8.max
+ public var audioTrackId: UInt8?
+ private var displayImage: CIImage?
+ private lazy var commandQueue: (any MTLCommandQueue)? = {
+ return device?.makeCommandQueue()
+ }()
+ private var context: CIContext?
+ private var effects: [any VideoEffect] = .init()
+
+ /// Initializes and returns a newly allocated view object with the specified frame rectangle.
+ public init(frame: CGRect) {
+ super.init(frame: frame, device: MTLCreateSystemDefaultDevice())
+ awakeFromNib()
+ }
+
+ /// Returns an object initialized from data in a given unarchiver.
+ public required init(coder aDecoder: NSCoder) {
+ super.init(coder: aDecoder)
+ self.device = MTLCreateSystemDefaultDevice()
+ }
+
+ /// Prepares the receiver for service after it has been loaded from an Interface Builder archive, or nib file.
+ override public func awakeFromNib() {
+ super.awakeFromNib()
+ Task { @MainActor in
+ framebufferOnly = false
+ enableSetNeedsDisplay = true
+ if let device {
+ context = CIContext(mtlDevice: device, options: [.cacheIntermediates: false, .name: "MTHKView"])
+ }
+ }
+ }
+
+ /// Redraws the view’s contents.
+ override public func draw(_ rect: CGRect) {
+ guard
+ let context,
+ let currentDrawable = currentDrawable,
+ let commandBuffer = commandQueue?.makeCommandBuffer() else {
+ return
+ }
+ if
+ let currentRenderPassDescriptor = currentRenderPassDescriptor,
+ let renderCommandEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: currentRenderPassDescriptor) {
+ renderCommandEncoder.endEncoding()
+ }
+ guard let displayImage else {
+ commandBuffer.present(currentDrawable)
+ commandBuffer.commit()
+ return
+ }
+
+ var scaleX: CGFloat = 0
+ var scaleY: CGFloat = 0
+ var translationX: CGFloat = 0
+ var translationY: CGFloat = 0
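+ // Compute the scale and translation for the current video gravity. Translation is
+ // divided by the scale because it is applied before the scale transform below.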
+ switch videoGravity {
+ case .resize:
+ scaleX = drawableSize.width / displayImage.extent.width
+ scaleY = drawableSize.height / displayImage.extent.height
+ case .resizeAspect:
+ let scale: CGFloat = min(drawableSize.width / displayImage.extent.width, drawableSize.height / displayImage.extent.height)
+ scaleX = scale
+ scaleY = scale
+ translationX = (drawableSize.width - displayImage.extent.width * scale) / scaleX / 2
+ translationY = (drawableSize.height - displayImage.extent.height * scale) / scaleY / 2
+ case .resizeAspectFill:
+ let scale: CGFloat = max(drawableSize.width / displayImage.extent.width, drawableSize.height / displayImage.extent.height)
+ scaleX = scale
+ scaleY = scale
+ translationX = (drawableSize.width - displayImage.extent.width * scale) / scaleX / 2
+ translationY = (drawableSize.height - displayImage.extent.height * scale) / scaleY / 2
+ default:
+ break
+ }
+
+ var scaledImage: CIImage = displayImage
+ for effect in effects {
+ scaledImage = effect.execute(scaledImage)
+ }
+
+ scaledImage = scaledImage
+ .transformed(by: CGAffineTransform(translationX: translationX, y: translationY))
+ .transformed(by: CGAffineTransform(scaleX: scaleX, y: scaleY))
+
+ let destination = CIRenderDestination(
+ width: Int(drawableSize.width),
+ height: Int(drawableSize.height),
+ pixelFormat: colorPixelFormat,
+ commandBuffer: commandBuffer,
+ mtlTextureProvider: { () -> (any MTLTexture) in
+ return currentDrawable.texture
+ })
+
+ _ = try? context.startTask(toRender: scaledImage, to: destination)
+
+ commandBuffer.present(currentDrawable)
+ commandBuffer.commit()
+ }
+
+ /// Registers a video effect.
+ public func registerVideoEffect(_ effect: some VideoEffect) -> Bool {
+ if effects.contains(where: { $0 === effect }) {
+ return false
+ }
+ effects.append(effect)
+ return true
+ }
+
+ /// Unregisters a video effect.
+ public func unregisterVideoEffect(_ effect: some VideoEffect) -> Bool {
+ if let index = effects.firstIndex(where: { $0 === effect }) {
+ effects.remove(at: index)
+ return true
+ }
+ return false
+ }
+}
+
+extension MTHKView: MediaMixerOutput {
+ // MARK: MediaMixerOutput
+ public func selectTrack(_ id: UInt8?, mediaType: CMFormatDescription.MediaType) async {
+ switch mediaType {
+ case .audio:
+ break
+ case .video:
+ videoTrackId = id
+ default:
+ break
+ }
+ }
+
+ nonisolated public func mixer(_ mixer: MediaMixer, didOutput buffer: AVAudioPCMBuffer, when: AVAudioTime) {
+ }
+
+ nonisolated public func mixer(_ mixer: MediaMixer, didOutput sampleBuffer: CMSampleBuffer) {
+ Task { @MainActor in
+ displayImage = try? sampleBuffer.imageBuffer?.makeCIImage()
+ #if os(macOS)
+ self.needsDisplay = true
+ #else
+ self.setNeedsDisplay()
+ #endif
+ }
+ }
+}
+
+extension MTHKView: StreamOutput {
+ // MARK: StreamOutput
+ nonisolated public func stream(_ stream: some StreamConvertible, didOutput audio: AVAudioBuffer, when: AVAudioTime) {
+ }
+
+ nonisolated public func stream(_ stream: some StreamConvertible, didOutput video: CMSampleBuffer) {
+ Task { @MainActor in
+ displayImage = try? video.imageBuffer?.makeCIImage()
+ #if os(macOS)
+ self.needsDisplay = true
+ #else
+ self.setNeedsDisplay()
+ #endif
+ }
+ }
+}
+
+#endif
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/View/MTHKViewRepresentable.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/View/MTHKViewRepresentable.swift
new file mode 100644
index 000000000..307225a0a
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/View/MTHKViewRepresentable.swift
@@ -0,0 +1,89 @@
+#if os(iOS) || os(tvOS)
+import AVFoundation
+import SwiftUI
+
+/// A SwiftUI view that displays using a `MTHKView`.
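+///
+/// A usage sketch (assumes `mixer` conforms to PreviewSource):
+/// ```swift
+/// MTHKViewRepresentable(previewSource: mixer, videoGravity: .resizeAspectFill)
+/// ```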
+public struct MTHKViewRepresentable: UIViewRepresentable {
+ /// A type that presents the captured content.
+ public protocol PreviewSource {
+ func connect(to view: MTHKView)
+ }
+
+ public typealias UIViewType = MTHKView
+
+ /// Specifies the preview source.
+ public let previewSource: any PreviewSource
+ /// Specifies the videoGravity for MTHKView.
+ public var videoGravity: AVLayerVideoGravity = .resizeAspect
+
+ private var view = MTHKView(frame: .zero)
+
+ /// Creates a view representable.
+ public init(previewSource: some PreviewSource, videoGravity: AVLayerVideoGravity = .resizeAspect) {
+ self.previewSource = previewSource
+ self.videoGravity = videoGravity
+ }
+
+ /// Selects track id for streaming.
+ public func track(_ id: UInt8?) -> Self {
+ Task { @MainActor in
+ await view.selectTrack(id, mediaType: .video)
+ }
+ return self
+ }
+
+ public func makeUIView(context: Context) -> MTHKView {
+ previewSource.connect(to: view)
+ return view
+ }
+
+ public func updateUIView(_ uiView: MTHKView, context: Context) {
+ uiView.videoGravity = videoGravity
+ }
+}
+
+#elseif os(macOS)
+import AVFoundation
+import SwiftUI
+
+/// A SwiftUI view that displays using a `MTHKView`.
+public struct MTHKViewRepresentable: NSViewRepresentable {
+ /// A type that presents the captured content.
+ public protocol PreviewSource {
+ func connect(to view: MTHKView)
+ }
+
+ public typealias NSViewType = MTHKView
+
+ /// Specifies the preview source.
+ public let previewSource: any PreviewSource
+ /// Specifies the videoGravity for MTHKView.
+ public var videoGravity: AVLayerVideoGravity = .resizeAspect
+
+ private var view = MTHKView(frame: .zero)
+
+ /// Creates a view representable.
+ public init(previewSource: some PreviewSource, videoGravity: AVLayerVideoGravity = .resizeAspect) {
+ self.previewSource = previewSource
+ self.videoGravity = videoGravity
+ }
+
+ /// Selects track id for streaming.
+ public func track(_ id: UInt8?) -> Self {
+ Task { @MainActor in
+ await view.selectTrack(id, mediaType: .video)
+ }
+ return self
+ }
+
+ public func makeNSView(context: Context) -> MTHKView {
+ previewSource.connect(to: view)
+ return view
+ }
+
+ public func updateNSView(_ nsView: MTHKView, context: Context) {
+ nsView.videoGravity = videoGravity
+ }
+}
+
+#endif
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/View/PiPHKView.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/View/PiPHKView.swift
new file mode 100644
index 000000000..a94eb108d
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/View/PiPHKView.swift
@@ -0,0 +1,143 @@
+#if os(iOS) || os(tvOS) || os(visionOS)
+import AVFoundation
+import Foundation
+import UIKit
+
+/// A view that displays the video content of a stream using the AVSampleBufferDisplayLayer API.
+public class PiPHKView: UIView {
+ /// The view’s background color.
+ public static var defaultBackgroundColor: UIColor = .black
+
+ /// Returns the class used to create the layer for instances of this class.
+ override public class var layerClass: AnyClass {
+ AVSampleBufferDisplayLayer.self
+ }
+
+ /// The view’s Core Animation layer used for rendering.
+ override public var layer: AVSampleBufferDisplayLayer {
+ super.layer as! AVSampleBufferDisplayLayer
+ }
+
+ public var videoTrackId: UInt8? = UInt8.max
+ public var audioTrackId: UInt8?
+
+ /// A value that specifies how the video is displayed within a player layer’s bounds.
+ public var videoGravity: AVLayerVideoGravity = .resizeAspect {
+ didSet {
+ layer.videoGravity = videoGravity
+ }
+ }
+
+ /// Initializes and returns a newly allocated view object with the specified frame rectangle.
+ override public init(frame: CGRect) {
+ super.init(frame: frame)
+ awakeFromNib()
+ }
+
+ /// Returns an object initialized from data in a given unarchiver.
+ public required init?(coder aDecoder: NSCoder) {
+ super.init(coder: aDecoder)
+ }
+
+ /// Prepares the receiver for service after it has been loaded from an Interface Builder archive, or nib file.
+ override public func awakeFromNib() {
+ super.awakeFromNib()
+ Task { @MainActor in
+ backgroundColor = Self.defaultBackgroundColor
+ layer.backgroundColor = Self.defaultBackgroundColor.cgColor
+ layer.videoGravity = videoGravity
+ }
+ }
+}
+#else
+
+import AppKit
+import AVFoundation
+
+/// A view that displays the video content of a stream using the AVSampleBufferDisplayLayer API.
+public class PiPHKView: NSView {
+ /// The view’s background color.
+ public static var defaultBackgroundColor: NSColor = .black
+
+ /// A value that specifies how the video is displayed within a player layer’s bounds.
+ public var videoGravity: AVLayerVideoGravity = .resizeAspect {
+ didSet {
+ layer?.setValue(videoGravity, forKey: "videoGravity")
+ }
+ }
+
+ /// Specifies the video track id to display.
+ public var videoTrackId: UInt8? = UInt8.max
+ public var audioTrackId: UInt8?
+
+ /// Initializes and returns a newly allocated view object with the specified frame rectangle.
+ override public init(frame: CGRect) {
+ super.init(frame: frame)
+ awakeFromNib()
+ }
+
+ /// Returns an object initialized from data in a given unarchiver.
+ public required init?(coder aDecoder: NSCoder) {
+ super.init(coder: aDecoder)
+ }
+
+ /// Prepares the receiver for service after it has been loaded from an Interface Builder archive, or nib file.
+ override public func awakeFromNib() {
+ super.awakeFromNib()
+ Task { @MainActor in
+ wantsLayer = true
+ layer = AVSampleBufferDisplayLayer()
+ layer?.backgroundColor = PiPHKView.defaultBackgroundColor.cgColor
+ layer?.setValue(videoGravity, forKey: "videoGravity")
+ }
+ }
+}
+
+#endif
+
+extension PiPHKView: MediaMixerOutput {
+ // MARK: MediaMixerOutput
+ public func selectTrack(_ id: UInt8?, mediaType: CMFormatDescription.MediaType) async {
+ switch mediaType {
+ case .audio:
+ break
+ case .video:
+ videoTrackId = id
+ default:
+ break
+ }
+ }
+
+ nonisolated public func mixer(_ mixer: MediaMixer, didOutput buffer: AVAudioPCMBuffer, when: AVAudioTime) {
+ }
+
+ nonisolated public func mixer(_ mixer: MediaMixer, didOutput sampleBuffer: CMSampleBuffer) {
+ Task { @MainActor in
+ #if os(macOS)
+ (layer as? AVSampleBufferDisplayLayer)?.enqueue(sampleBuffer)
+ self.needsDisplay = true
+ #else
+ (layer as AVSampleBufferDisplayLayer).enqueue(sampleBuffer)
+ self.setNeedsDisplay()
+ #endif
+ }
+ }
+}
+
+extension PiPHKView: StreamOutput {
+ // MARK: StreamOutput
+ nonisolated public func stream(_ stream: some StreamConvertible, didOutput audio: AVAudioBuffer, when: AVAudioTime) {
+ }
+
+ nonisolated public func stream(_ stream: some StreamConvertible, didOutput video: CMSampleBuffer) {
+ Task { @MainActor in
+ #if os(macOS)
+ (layer as? AVSampleBufferDisplayLayer)?.enqueue(video)
+ self.needsDisplay = true
+ #else
+ (layer as AVSampleBufferDisplayLayer).enqueue(video)
+ self.setNeedsDisplay()
+ #endif
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/View/PiPHKViewRepresentable.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/View/PiPHKViewRepresentable.swift
new file mode 100644
index 000000000..5996d5089
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/View/PiPHKViewRepresentable.swift
@@ -0,0 +1,89 @@
+#if os(iOS) || os(tvOS) || os(visionOS)
+import AVFoundation
+import SwiftUI
+
+/// A SwiftUI view that displays video content using a `PiPHKView`.
+public struct PiPHKViewRepresentable: UIViewRepresentable {
+ /// A type that presents the captured content.
+ public protocol PreviewSource {
+ func connect(to view: PiPHKView)
+ }
+
+ public typealias UIViewType = PiPHKView
+
+ /// Specifies the preview source.
+ public let previewSource: any PreviewSource
+ /// Specifies the videoGravity for PiPHKView.
+ public var videoGravity: AVLayerVideoGravity = .resizeAspect
+
+ private var view = PiPHKView(frame: .zero)
+
+ /// Creates a view representable.
+ public init(previewSource: any PreviewSource, videoGravity: AVLayerVideoGravity = .resizeAspect) {
+ self.previewSource = previewSource
+ self.videoGravity = videoGravity
+ }
+
+ /// Selects the video track id to display.
+ public func track(_ id: UInt8?) -> Self {
+ Task { @MainActor in
+ await view.selectTrack(id, mediaType: .video)
+ }
+ return self
+ }
+
+ public func makeUIView(context: Context) -> PiPHKView {
+ previewSource.connect(to: view)
+ return view
+ }
+
+ public func updateUIView(_ uiView: PiPHKView, context: Context) {
+ uiView.videoGravity = videoGravity
+ }
+}
+
+#else
+import AVFoundation
+import SwiftUI
+
+/// A SwiftUI view that displays video content using a `PiPHKView`.
+public struct PiPHKViewRepresentable: NSViewRepresentable {
+ /// A type that presents the captured content.
+ public protocol PreviewSource {
+ func connect(to view: PiPHKView)
+ }
+
+ public typealias NSViewType = PiPHKView
+
+ /// Specifies the preview source.
+ public let previewSource: any PreviewSource
+ /// Specifies the videoGravity for PiPHKView.
+ public var videoGravity: AVLayerVideoGravity = .resizeAspect
+
+ private var view = PiPHKView(frame: .zero)
+
+ /// Creates a view representable.
+ public init(previewSource: any PreviewSource, videoGravity: AVLayerVideoGravity = .resizeAspect) {
+ self.previewSource = previewSource
+ self.videoGravity = videoGravity
+ }
+
+ /// Selects the video track id to display.
+ public func track(_ id: UInt8?) -> Self {
+ Task { @MainActor in
+ await view.selectTrack(id, mediaType: .video)
+ }
+ return self
+ }
+
+ public func makeNSView(context: Context) -> PiPHKView {
+ previewSource.connect(to: view)
+ return view
+ }
+
+ public func updateNSView(_ nsView: PiPHKView, context: Context) {
+ nsView.videoGravity = videoGravity
+ }
+}
+
+#endif
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/AVAudioPCMBufferFactory.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/AVAudioPCMBufferFactory.swift
new file mode 100644
index 000000000..f0b705bd7
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/AVAudioPCMBufferFactory.swift
@@ -0,0 +1,56 @@
+import AVFoundation
+@testable import HaishinKit
+
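+/// Synthesizes sine-wave AVAudioPCMBuffer instances for codec and mixer tests.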
+enum AVAudioPCMBufferFactory {
+ static func makeSinWave(_ sampleRate: Double = 44100, numSamples: Int = 1024, channels: UInt32 = 1) -> AVAudioPCMBuffer? {
+ var streamDescription = AudioStreamBasicDescription(
+ mSampleRate: sampleRate,
+ mFormatID: kAudioFormatLinearPCM,
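+ // mFormatFlags 0xc = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked (16-bit interleaved PCM).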
+ mFormatFlags: 0xc,
+ mBytesPerPacket: 2 * channels,
+ mFramesPerPacket: 1,
+ mBytesPerFrame: 2 * channels,
+ mChannelsPerFrame: channels,
+ mBitsPerChannel: 16,
+ mReserved: 0
+ )
+
+ guard let format = AVAudioFormat(streamDescription: &streamDescription, channelLayout: AVAudioUtil.makeChannelLayout(channels)) else {
+ return nil
+ }
+
+ let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: AVAudioFrameCount(numSamples))!
+ buffer.frameLength = buffer.frameCapacity
+
+ let channels = Int(format.channelCount)
+ let samples = buffer.int16ChannelData![0]
+ for n in 0..<numSamples {
+ let value = sin(2.0 * .pi * 440.0 * Double(n) / sampleRate) // 440 Hz test tone
+ for m in 0..<channels {
+ samples[n * channels + m] = Int16(value * 16383.0)
+ }
+ }
+
+ return buffer
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/CMAudioSampleBufferFactory.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/CMAudioSampleBufferFactory.swift
new file mode 100644
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/CMAudioSampleBufferFactory.swift
+import AVFoundation
+@testable import HaishinKit
+
+enum CMAudioSampleBufferFactory {
+ static func makeSilence(_ sampleRate: Double = 44100, numSamples: Int = 1024, channels: UInt32 = 1, presentaionTimeStamp: CMTime = .zero) -> CMSampleBuffer? {
+ var asbd = AudioStreamBasicDescription(
+ mSampleRate: sampleRate,
+ mFormatID: kAudioFormatLinearPCM,
+ mFormatFlags: 0xc,
+ mBytesPerPacket: 2 * channels,
+ mFramesPerPacket: 1,
+ mBytesPerFrame: 2 * channels,
+ mChannelsPerFrame: channels,
+ mBitsPerChannel: 16,
+ mReserved: 0
+ )
+ var formatDescription: CMAudioFormatDescription?
+ var status: OSStatus = noErr
+ var blockBuffer: CMBlockBuffer?
+ let blockSize = numSamples * Int(asbd.mBytesPerPacket)
+ status = CMBlockBufferCreateWithMemoryBlock(
+ allocator: nil,
+ memoryBlock: nil,
+ blockLength: blockSize,
+ blockAllocator: nil,
+ customBlockSource: nil,
+ offsetToData: 0,
+ dataLength: blockSize,
+ flags: 0,
+ blockBufferOut: &blockBuffer
+ )
+ status = CMAudioFormatDescriptionCreate(
+ allocator: kCFAllocatorDefault,
+ asbd: &asbd,
+ layoutSize: 0,
+ layout: nil,
+ magicCookieSize: 0,
+ magicCookie: nil,
+ extensions: nil,
+ formatDescriptionOut: &formatDescription
+ )
+ guard let blockBuffer, status == noErr else {
+ return nil
+ }
+ status = CMBlockBufferFillDataBytes(
+ with: 0,
+ blockBuffer: blockBuffer,
+ offsetIntoDestination: 0,
+ dataLength: blockSize
+ )
+ guard status == noErr else {
+ return nil
+ }
+ var sampleBuffer: CMSampleBuffer?
+ status = CMAudioSampleBufferCreateWithPacketDescriptions(
+ allocator: nil,
+ dataBuffer: blockBuffer,
+ dataReady: true,
+ makeDataReadyCallback: nil,
+ refcon: nil,
+ formatDescription: formatDescription!,
+ sampleCount: numSamples,
+ presentationTimeStamp: presentaionTimeStamp,
+ packetDescriptions: nil,
+ sampleBufferOut: &sampleBuffer
+ )
+ guard let sampleBuffer, status == noErr else {
+ return nil
+ }
+ return sampleBuffer
+ }
+
+ static func makeSinWave(_ sampleRate: Double = 44100, numSamples: Int = 1024, channels: UInt32 = 1) -> CMSampleBuffer? {
+ var status: OSStatus = noErr
+ var sampleBuffer: CMSampleBuffer?
+ var timing = CMSampleTimingInfo(
+ duration: CMTime(value: 1, timescale: Int32(sampleRate)),
+ presentationTimeStamp: CMTime.zero,
+ decodeTimeStamp: CMTime.invalid
+ )
+
+ var streamDescription = AudioStreamBasicDescription(
+ mSampleRate: sampleRate,
+ mFormatID: kAudioFormatLinearPCM,
+ mFormatFlags: 0xc,
+ mBytesPerPacket: 2 * channels,
+ mFramesPerPacket: 1,
+ mBytesPerFrame: 2 * channels,
+ mChannelsPerFrame: channels,
+ mBitsPerChannel: 16,
+ mReserved: 0
+ )
+
+ guard let format = AVAudioFormat(streamDescription: &streamDescription, channelLayout: AVAudioUtil.makeChannelLayout(channels)) else {
+ return nil
+ }
+
+ status = CMSampleBufferCreate(
+ allocator: kCFAllocatorDefault,
+ dataBuffer: nil,
+ dataReady: false,
+ makeDataReadyCallback: nil,
+ refcon: nil,
+ formatDescription: format.formatDescription,
+ sampleCount: numSamples,
+ sampleTimingEntryCount: 1,
+ sampleTimingArray: &timing,
+ sampleSizeEntryCount: 0,
+ sampleSizeArray: nil,
+ sampleBufferOut: &sampleBuffer
+ )
+
+ guard status == noErr else {
+ return nil
+ }
+
+ let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: AVAudioFrameCount(numSamples))!
+ buffer.frameLength = buffer.frameCapacity
+
+ let channels = Int(format.channelCount)
+ let samples = buffer.int16ChannelData![0]
+ for n in 0..<numSamples {
+ let value = sin(2.0 * .pi * 440.0 * Double(n) / sampleRate) // 440 Hz test tone
+ for m in 0..<channels {
+ samples[n * channels + m] = Int16(value * 16383.0)
+ }
+ }
+
+ guard let sampleBuffer else {
+ return nil
+ }
+ status = CMSampleBufferSetDataBufferFromAudioBufferList(
+ sampleBuffer,
+ blockBufferAllocator: kCFAllocatorDefault,
+ blockBufferMemoryAllocator: kCFAllocatorDefault,
+ flags: 0,
+ bufferList: buffer.audioBufferList
+ )
+ guard status == noErr else {
+ return nil
+ }
+ return sampleBuffer
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/CMVideoSampleBufferFactory.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/CMVideoSampleBufferFactory.swift
new file mode 100644
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/CMVideoSampleBufferFactory.swift
+import AVFoundation
+import CoreMedia
+
+enum CMVideoSampleBufferFactory {
+ static func makeSampleBuffer(_ data: Data) -> CMSampleBuffer? {
+ var blockBuffer: CMBlockBuffer?
+ _ = data.withUnsafeBytes { (buffer: UnsafeRawBufferPointer) in
+ CMBlockBufferCreateWithMemoryBlock(
+ allocator: kCFAllocatorDefault,
+ memoryBlock: UnsafeMutableRawPointer(mutating: buffer.baseAddress),
+ blockLength: data.count,
+ blockAllocator: kCFAllocatorNull,
+ customBlockSource: nil,
+ offsetToData: 0,
+ dataLength: data.count,
+ flags: 0,
+ blockBufferOut: &blockBuffer
+ )
+ }
+ guard let blockBuffer else {
+ return nil
+ }
+ var timing = CMSampleTimingInfo(
+ duration: .invalid,
+ presentationTimeStamp: .invalid,
+ decodeTimeStamp: .invalid
+ )
+ var sampleBuffer: CMSampleBuffer?
+ let sampleStatus = CMSampleBufferCreateReady(
+ allocator: kCFAllocatorDefault,
+ dataBuffer: blockBuffer,
+ formatDescription: nil,
+ sampleCount: 1,
+ sampleTimingEntryCount: 1,
+ sampleTimingArray: &timing,
+ sampleSizeEntryCount: 1,
+ sampleSizeArray: [data.count],
+ sampleBufferOut: &sampleBuffer
+ )
+ guard sampleStatus == noErr else {
+ return nil
+ }
+ return sampleBuffer
+ }
+
+ static func makeSampleBuffer(width: Int, height: Int) -> CMSampleBuffer? {
+ var pixelBuffer: CVPixelBuffer?
+ CVPixelBufferCreate(nil, width, height, kCVPixelFormatType_32BGRA, nil, &pixelBuffer)
+ guard let pixelBuffer else {
+ return nil
+ }
+ var outputFormat: CMFormatDescription?
+ CMVideoFormatDescriptionCreateForImageBuffer(
+ allocator: kCFAllocatorDefault,
+ imageBuffer: pixelBuffer,
+ formatDescriptionOut: &outputFormat
+ )
+ guard let outputFormat else {
+ return nil
+ }
+ var timingInfo = CMSampleTimingInfo()
+ var sampleBuffer: CMSampleBuffer?
+ guard CMSampleBufferCreateReadyWithImageBuffer(
+ allocator: kCFAllocatorDefault,
+ imageBuffer: pixelBuffer,
+ formatDescription: outputFormat,
+ sampleTiming: &timingInfo,
+ sampleBufferOut: &sampleBuffer
+ ) == noErr else {
+ return nil
+ }
+ return sampleBuffer
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Codec/AudioCodecSettingsFormatTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Codec/AudioCodecSettingsFormatTests.swift
new file mode 100644
index 000000000..58d4f40b6
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Codec/AudioCodecSettingsFormatTests.swift
@@ -0,0 +1,20 @@
+import AVFoundation
+import Foundation
+import Testing
+
+@testable import HaishinKit
+
+@Suite struct AudioCodecSettingsFormatTests {
+ @Test func opus_sampleRate() {
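+ // Opus supports only 8, 12, 16, 24 and 48 kHz, so input rates snap to a supported rate (e.g. 44.1 kHz -> 48 kHz).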
+ #expect(AudioCodecSettings.Format.opus.makeSampleRate(49000, output: 0) == 48000.0)
+ #expect(AudioCodecSettings.Format.opus.makeSampleRate(44100, output: 0) == 48000.0)
+ #expect(AudioCodecSettings.Format.opus.makeSampleRate(20000, output: 0) == 16000.0)
+ #expect(AudioCodecSettings.Format.opus.makeSampleRate(1000, output: 0) == 8000.0)
+ }
+
+ @Test func aac_sampleRate() {
+ #expect(AudioCodecSettings.Format.aac.makeSampleRate(48000, output: 44100) == 44100.0)
+ #expect(AudioCodecSettings.Format.aac.makeSampleRate(44100, output: 0) == 44100.0)
+ #expect(AudioCodecSettings.Format.aac.makeSampleRate(20000, output: 0) == 20000.0)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Codec/AudioCodecTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Codec/AudioCodecTests.swift
new file mode 100644
index 000000000..42a1d3620
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Codec/AudioCodecTests.swift
@@ -0,0 +1,115 @@
+import AVFoundation
+import Foundation
+import Testing
+
+@testable import HaishinKit
+
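+/// Feeds synthesized sine-wave buffers through AudioCodec and verifies the negotiated output sample rate and channel count.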
+@Suite struct AudioCodecTests {
+ @Test func aac_44100hz_step_1024() {
+ let encoder = HaishinKit.AudioCodec()
+ encoder.startRunning()
+ for _ in 0..<10 {
+ if let sampleBuffer = AVAudioPCMBufferFactory.makeSinWave(44100, numSamples: 1024) {
+ encoder.append(sampleBuffer, when: .init())
+ }
+ }
+ #expect(encoder.outputFormat?.sampleRate == 44100)
+ }
+
+ @Test func aac_48000hz_step_1024() {
+ let encoder = HaishinKit.AudioCodec()
+ encoder.startRunning()
+ for _ in 0..<10 {
+ if let sampleBuffer = AVAudioPCMBufferFactory.makeSinWave(48000.0, numSamples: 1024) {
+ encoder.append(sampleBuffer, when: .init())
+ }
+ }
+ #expect(encoder.outputFormat?.sampleRate == 48000)
+ }
+
+ @Test func aac_24000hz_step_1024() {
+ let encoder = HaishinKit.AudioCodec()
+ encoder.startRunning()
+ for _ in 0..<10 {
+ if let sampleBuffer = AVAudioPCMBufferFactory.makeSinWave(24000.0, numSamples: 1024) {
+ encoder.append(sampleBuffer, when: .init())
+ }
+ }
+ #expect(encoder.outputFormat?.sampleRate == 24000)
+ }
+
+ @Test func aac_16000hz_step_1024() {
+ let encoder = HaishinKit.AudioCodec()
+ encoder.startRunning()
+ for _ in 0..<10 {
+ if let sampleBuffer = AVAudioPCMBufferFactory.makeSinWave(16000.0, numSamples: 1024) {
+ encoder.append(sampleBuffer, when: .init())
+ }
+ }
+ #expect(encoder.outputFormat?.sampleRate == 16000)
+ }
+
+ @Test func aac_8000hz_step_256() {
+ let encoder = HaishinKit.AudioCodec()
+ encoder.startRunning()
+ for _ in 0..<10 {
+ if let sampleBuffer = AVAudioPCMBufferFactory.makeSinWave(8000.0, numSamples: 256) {
+ encoder.append(sampleBuffer, when: .init())
+ }
+ }
+ #expect(encoder.outputFormat?.sampleRate == 8000)
+ }
+
+ @Test func aac_8000hz_step_960() {
+ let encoder = HaishinKit.AudioCodec()
+ encoder.startRunning()
+ for _ in 0..<10 {
+ if let sampleBuffer = AVAudioPCMBufferFactory.makeSinWave(8000.0, numSamples: 960) {
+ encoder.append(sampleBuffer, when: .init())
+ }
+ }
+ #expect(encoder.outputFormat?.sampleRate == 8000)
+ }
+
+ @Test func aac_44100hz_step_1224() {
+ let encoder = HaishinKit.AudioCodec()
+ encoder.startRunning()
+ for _ in 0..<10 {
+ if let sampleBuffer = AVAudioPCMBufferFactory.makeSinWave(44100.0, numSamples: 1224) {
+ encoder.append(sampleBuffer, when: .init())
+ }
+ }
+ }
+
+ @Test func aac_1_channel_to_2_channel() {
+ let encoder = HaishinKit.AudioCodec()
+ encoder.settings = .init(downmix: false, channelMap: [0, 0])
+ encoder.startRunning()
+ for _ in 0..<10 {
+ if let sampleBuffer = AVAudioPCMBufferFactory.makeSinWave(44100.0, numSamples: 1024) {
+ encoder.append(sampleBuffer, when: .init())
+ }
+ }
+ #expect(encoder.outputFormat?.channelCount == 2)
+ }
+
+ @Test func aac_44100_any_steps() {
+ let numSamples: [Int] = [1024, 1024, 1028, 1024, 1028, 1028, 962, 962, 960, 2237, 2236]
+ let encoder = HaishinKit.AudioCodec()
+ encoder.startRunning()
+ for numSample in numSamples {
+ if let sampleBuffer = AVAudioPCMBufferFactory.makeSinWave(44100.0, numSamples: numSample) {
+ encoder.append(sampleBuffer, when: .init())
+ }
+ }
+ #expect(encoder.outputFormat?.sampleRate == 44100)
+ }
+
+ @Test func test3Channel_withoutCrash() {
+ let encoder = HaishinKit.AudioCodec()
+ encoder.startRunning()
+ if let sampleBuffer = CMAudioSampleBufferFactory.makeSilence(44100, numSamples: 256, channels: 3) {
+ encoder.append(sampleBuffer)
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Extension/CMSampleBuffer+ExtensionTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Extension/CMSampleBuffer+ExtensionTests.swift
new file mode 100644
index 000000000..2839f7d4a
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Extension/CMSampleBuffer+ExtensionTests.swift
@@ -0,0 +1,21 @@
+import CoreMedia
+import Foundation
+import Testing
+
+@testable import HaishinKit
+
+@Suite struct CMSampleBufferExtensionTests {
+ @Test func isNotSync() {
+ if let video1 = CMVideoSampleBufferFactory.makeSampleBuffer(width: 100, height: 100) {
+ video1.sampleAttachments[0][.notSync] = 1
+ } else {
+ Issue.record()
+ }
+
+ if let video2 = CMVideoSampleBufferFactory.makeSampleBuffer(width: 100, height: 100) {
+ #expect(!video2.isNotSync)
+ } else {
+ Issue.record()
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Extension/ExpressibleByIntegerLiteral+ExtensionTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Extension/ExpressibleByIntegerLiteral+ExtensionTests.swift
new file mode 100644
index 000000000..096772b27
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Extension/ExpressibleByIntegerLiteral+ExtensionTests.swift
@@ -0,0 +1,30 @@
+import Foundation
+import Testing
+
+@testable import HaishinKit
+
+@Suite struct ExpressibleByIntegerLiteralTests {
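+ // Serializing fixed-width integers as .bigEndian yields network byte order in the resulting Data.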
+ @Test func int32() {
+ #expect(Int32.min.bigEndian.data == Data([128, 0, 0, 0]))
+ #expect(Int32(32).bigEndian.data == Data([0, 0, 0, 32]))
+ #expect(Int32.max.bigEndian.data == Data([127, 255, 255, 255]))
+ }
+
+ @Test func uint32() {
+ #expect(UInt32.min.bigEndian.data == Data([0, 0, 0, 0]))
+ #expect(UInt32(32).bigEndian.data == Data([0, 0, 0, 32]))
+ #expect(UInt32.max.bigEndian.data == Data([255, 255, 255, 255]))
+ }
+
+ @Test func int64() {
+ #expect(Int64.min.bigEndian.data == Data([128, 0, 0, 0, 0, 0, 0, 0]))
+ #expect(Int64(32).bigEndian.data == Data([0, 0, 0, 0, 0, 0, 0, 32]))
+ #expect(Int64.max.bigEndian.data == Data([127, 255, 255, 255, 255, 255, 255, 255]))
+ }
+
+ @Test func uint64() {
+ #expect(UInt64.min.bigEndian.data == Data([0, 0, 0, 0, 0, 0, 0, 0]))
+ #expect(UInt64(32).bigEndian.data == Data([0, 0, 0, 0, 0, 0, 0, 32]))
+ #expect(UInt64.max.bigEndian.data == Data([255, 255, 255, 255, 255, 255, 255, 255]))
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Extension/SwiftCore+ExtensionTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Extension/SwiftCore+ExtensionTests.swift
new file mode 100644
index 000000000..c1ef797bf
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Extension/SwiftCore+ExtensionTests.swift
@@ -0,0 +1,11 @@
+import Foundation
+import Testing
+
+@testable import HaishinKit
+
+@Suite struct SwiftCoreExtensionTests {
+ @Test func int32() {
+ #expect(Int32.min == Int32(data: Int32.min.data))
+ #expect(Int32.max == Int32(data: Int32.max.data))
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/ISO/ADTSHeaderTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/ISO/ADTSHeaderTests.swift
new file mode 100644
index 000000000..5554603a4
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/ISO/ADTSHeaderTests.swift
@@ -0,0 +1,11 @@
+import Foundation
+import Testing
+
+@testable import HaishinKit
+
+@Suite struct ADTSHeaderTests {
+ @Test func bytes() {
+ let data = Data([255, 241, 77, 128, 112, 127, 252, 1])
+ _ = ADTSHeader(data: data)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/ISO/AudioSpecificConfigTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/ISO/AudioSpecificConfigTests.swift
new file mode 100644
index 000000000..9f87f290c
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/ISO/AudioSpecificConfigTests.swift
@@ -0,0 +1,14 @@
+import Foundation
+import Testing
+
+@testable import HaishinKit
+
+@Suite struct AudioSpecificConfigTests {
+ @Test func bytes() {
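+ // AudioSpecificConfig packs audioObjectType (5 bits), samplingFrequencyIndex (4 bits) and channelConfiguration (4 bits).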
+ #expect(AudioSpecificConfig(type: .aacMain, frequency: .hz48000, channel: .frontCenter).bytes == [0b00001001, 0b10001000])
+ #expect(AudioSpecificConfig(type: .aacMain, frequency: .hz44100, channel: .frontCenter).bytes == [0b00001010, 0b00001000])
+ #expect(AudioSpecificConfig(type: .aacMain, frequency: .hz24000, channel: .frontCenter).bytes == [0b00001011, 0b00001000])
+ #expect(AudioSpecificConfig(type: .aacMain, frequency: .hz16000, channel: .frontCenter).bytes == [0b00001100, 0b00001000])
+ #expect(AudioSpecificConfig(type: .aacMain, frequency: .hz8000, channel: .frontCenter).bytes == [0b00001101, 0b10001000])
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/ISO/ISOTypeBufferUtilTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/ISO/ISOTypeBufferUtilTests.swift
new file mode 100644
index 000000000..f835d6e3d
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/ISO/ISOTypeBufferUtilTests.swift
@@ -0,0 +1,36 @@
+import Foundation
+import Testing
+
+@testable import HaishinKit
+
+@Suite struct ISOTypeBufferUtilTests {
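+ // toNALFileFormat rewrites Annex B start codes (0x000001 / 0x00000001) into big-endian NAL length prefixes in place.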
+ @Test func toNALFileFormat_4() {
+ var data = Data([0, 0, 0, 1, 10, 10, 0, 0, 0, 1, 3, 3, 2, 0, 0, 0, 1, 5, 5, 5])
+ ISOTypeBufferUtil.toNALFileFormat(&data)
+ #expect(data.bytes == Data([0, 0, 0, 2, 10, 10, 0, 0, 0, 3, 3, 3, 2, 0, 0, 0, 3, 5, 5, 5]).bytes)
+ }
+
+ @Test func toNALFileFormat_3() {
+ var data = Data([0, 0, 1, 10, 10, 0, 0, 1, 3, 3, 2, 0, 0, 1, 5, 5, 5])
+ ISOTypeBufferUtil.toNALFileFormat(&data)
+ #expect(data.bytes == Data([0, 0, 2, 10, 10, 0, 0, 3, 3, 3, 2, 0, 0, 3, 5, 5, 5]).bytes)
+ }
+
+ @Test func toNALFileFormat() {
+ let expected = Data([0, 0, 1, 17, 33, 248, 224, 9, 224, 183, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159])
+ var data = expected
+ data[0] = 0
+ data[1] = 0
+ data[2] = 0
+ data[3] = 1
+ ISOTypeBufferUtil.toNALFileFormat(&data)
+ #expect(data.bytes == expected.bytes)
+ }
+
+ @Test func toNALFileFormat_3video() {
+ var data = Data([0, 0, 1, 33, 254, 120, 9, 224, 183, 253, 84, 22, 127, 170, 130, 207, 245, 80, 70, 125, 76, 125, 95, 250, 168, 44, 255, 85, 5, 159, 234, 160, 160, 250, 147, 253, 84, 22, 127, 170, 130, 195, 235, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 8, 143, 168, 175, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 34, 62, 162, 191, 213, 65, 17, 248, 175, 245, 80, 153, 248, 103, 253, 84, 17, 31, 81, 95, 234, 160, 179, 253, 84, 16, 31, 148, 250, 159, 253, 84, 16, 31, 140, 255, 85, 4, 71, 226, 191, 213, 65, 89, 255, 253, 84, 16, 31, 140, 255, 85, 5, 159, 234, 160, 179, 253, 84, 50, 125, 103, 225, 47, 245, 80, 89, 254, 170, 29, 63, 31, 254, 170, 11, 63, 213, 65, 17, 245, 21, 254, 170, 27, 63, 16, 125, 68, 64, 201, 255, 213, 65, 81, 245, 95, 234, 161, 243, 234, 52, 87, 245, 80, 225, 245, 8, 127, 170, 130, 207, 245, 80, 86, 127, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 67, 199, 212, 199, 226, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 21, 31, 175, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 39, 213, 255, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 224])
+
+ ISOTypeBufferUtil.toNALFileFormat(&data)
+ #expect(data.bytes == Data([0, 1, 73, 33, 254, 120, 9, 224, 183, 253, 84, 22, 127, 170, 130, 207, 245, 80, 70, 125, 76, 125, 95, 250, 168, 44, 255, 85, 5, 159, 234, 160, 160, 250, 147, 253, 84, 22, 127, 170, 130, 195, 235, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 8, 143, 168, 175, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 34, 62, 162, 191, 213, 65, 17, 248, 175, 245, 80, 153, 248, 103, 253, 84, 17, 31, 81, 95, 234, 160, 179, 253, 84, 16, 31, 148, 250, 159, 253, 84, 16, 31, 140, 255, 85, 4, 71, 226, 191, 213, 65, 89, 255, 253, 84, 16, 31, 140, 255, 85, 5, 159, 234, 160, 179, 253, 84, 50, 125, 103, 225, 47, 245, 80, 89, 254, 170, 29, 63, 31, 254, 170, 11, 63, 213, 65, 17, 245, 21, 254, 170, 27, 63, 16, 125, 68, 64, 201, 255, 213, 65, 81, 245, 95, 234, 161, 243, 234, 52, 87, 245, 80, 225, 245, 8, 127, 170, 130, 207, 245, 80, 86, 127, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 67, 199, 212, 199, 226, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 21, 31, 175, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 39, 213, 255, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 224]).bytes)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/ISO/NALUnitReaderTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/ISO/NALUnitReaderTests.swift
new file mode 100644
index 000000000..810fa305d
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/ISO/NALUnitReaderTests.swift
@@ -0,0 +1,34 @@
+import Foundation
+import Testing
+
+@testable import HaishinKit
+
+@Suite struct NALUnitReaderTests {
+ @Test func `h264 read slice`() {
+ let data = Data([0, 0, 0, 112, 33, 251, 108, 120, 54, 255, 85, 6, 191, 234, 160, 215, 253, 84, 26, 255, 170, 131, 95, 245, 80, 107, 254, 170, 13, 127, 213, 65, 175, 250, 168, 53, 255, 85, 6, 191, 234, 160, 215, 253, 84, 26, 255, 170, 131, 83, 239, 234, 160, 215, 253, 84, 26, 255, 170, 131, 95, 245, 80, 106, 127, 234, 160, 215, 253, 84, 26, 159, 250, 168, 53, 255, 85, 6, 71, 236, 251, 250, 168, 53, 255, 85, 6, 98, 58, 63, 245, 80, 107, 254, 170, 13, 127, 213, 65, 175, 250, 168, 53, 255, 85, 6, 191, 234, 160, 204, 253, 127, 170, 131, 95, 128])
+ let buffer = CMVideoSampleBufferFactory.makeSampleBuffer(data)
+ let reader = NALUnitReader()
+ if let buffer {
+ let nals = reader.read(buffer)
+ #expect(nals.count == 1)
+ #expect(nals[0][0] == 33)
+ }
+ }
+
+ @Test func `h264 read idr`() {
+ // Length-prefixed SEI (30 bytes) followed by a 1226-byte IDR slice whose tail is repeating filler.
+ var bytes: [UInt8] = [0, 0, 0, 30, 6, 5, 26, 71, 86, 74, 220, 92, 76, 67, 63, 148, 239, 197, 17, 60, 209, 67, 168, 1, 255, 204, 204, 255, 2, 0, 4, 0, 0, 128, 0, 0, 4, 202, 37, 184, 32, 0, 147, 255, 255, 225, 232, 160, 251, 221]
+ for _ in 0..<13 { bytes += [247, 223, 125] }
+ bytes.append(110)
+ for _ in 0..<391 { bytes += [186, 235, 174] }
+ bytes.append(188)
+ let data = Data(bytes)
+
+ let buffer = CMVideoSampleBufferFactory.makeSampleBuffer(data)
+ buffer?.isNotSync = false
+ let reader = NALUnitReader()
+ if let buffer {
+ let result = reader.read(buffer)
+ #expect(result.count == 2)
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioDeviceUnitTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioDeviceUnitTests.swift
new file mode 100644
index 000000000..64f81c2de
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioDeviceUnitTests.swift
@@ -0,0 +1,16 @@
+import AVFoundation
+import Foundation
+import Testing
+
+@testable import HaishinKit
+
+@Suite struct AudioDeviceUnitTests {
+ @Test func release() {
+ weak var weakDevice: AudioDeviceUnit?
+ _ = {
+ let device = try! AudioDeviceUnit(0, device: AVCaptureDevice.default(for: .audio)!)
+ weakDevice = device
+ }()
+ #expect(weakDevice == nil)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioMixerByMultiTrackTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioMixerByMultiTrackTests.swift
new file mode 100644
index 000000000..f23007ba2
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioMixerByMultiTrackTests.swift
@@ -0,0 +1,84 @@
+import AVFoundation
+import Foundation
+import Testing
+
+@testable import HaishinKit
+
+@Suite struct AudioMixerByMultiTrackTests {
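+ /// Captures mixer delegate callbacks so tests can assert on the emitted buffers and errors.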
+ final class Result: AudioMixerDelegate {
+ var outputs: [AVAudioPCMBuffer] = []
+ var error: AudioMixerError?
+
+ func audioMixer(_ audioMixer: some AudioMixer, track: UInt8, didInput buffer: AVAudioPCMBuffer, when: AVAudioTime) {
+ }
+
+ func audioMixer(_ audioMixer: some AudioMixer, didOutput audioFormat: AVAudioFormat) {
+ }
+
+ func audioMixer(_ audioMixer: some AudioMixer, didOutput audioBuffer: AVAudioPCMBuffer, when: AVAudioTime) {
+ outputs.append(audioBuffer)
+ }
+
+ func audioMixer(_ audioMixer: some AudioMixer, errorOccurred error: AudioMixerError) {
+ self.error = error
+ }
+ }
+
+ @Test func keep44100() {
+ let result = Result()
+ let mixer = AudioMixerByMultiTrack()
+ mixer.delegate = result
+ mixer.settings = .init(
+ sampleRate: 44100, channels: 1
+ )
+ mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!)
+ mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!)
+ #expect(mixer.outputFormat?.sampleRate == 44100)
+ mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!)
+ #expect(mixer.outputFormat?.sampleRate == 44100)
+ #expect(result.outputs.count == 2)
+ }
+
+ @Test func test44100to48000() {
+ let mixer = AudioMixerByMultiTrack()
+ mixer.settings = .init(
+ sampleRate: 44100, channels: 1
+ )
+ mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!)
+ #expect(mixer.outputFormat?.sampleRate == 44100)
+ mixer.settings = .init(
+ sampleRate: 48000, channels: 1
+ )
+ mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!)
+ #expect(mixer.outputFormat?.sampleRate == 48000)
+ }
+
+ @Test func test48000_2ch() {
+ let result = Result()
+ let mixer = AudioMixerByMultiTrack()
+ mixer.delegate = result
+ mixer.settings = .init(
+ sampleRate: 48000, channels: 2
+ )
+ mixer.append(1, buffer: CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 2)!)
+ mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 2)!)
+ #expect(mixer.outputFormat?.channelCount == 2)
+ #expect(mixer.outputFormat?.sampleRate == 48000)
+ mixer.append(1, buffer: CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 2)!)
+ mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 2)!)
+ // #expect(result.outputs.count == 2)
+ // #expect(result.error == nil)
+ }
+
+ @Test func inputFormats() {
+ let mixer = AudioMixerByMultiTrack()
+ mixer.settings = .init(
+ sampleRate: 44100, channels: 1
+ )
+ mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!)
+ mixer.append(1, buffer: CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!)
+ let inputFormats = mixer.inputFormats
+ #expect(inputFormats[0]?.sampleRate == 48000)
+ #expect(inputFormats[1]?.sampleRate == 44100)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioMixerBySingleTrackTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioMixerBySingleTrackTests.swift
new file mode 100644
index 000000000..552eb62b2
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioMixerBySingleTrackTests.swift
@@ -0,0 +1,112 @@
+import AVFoundation
+import Foundation
+import Testing
+
+@testable import HaishinKit
+
+@Suite struct AudioMixerBySingleTrackTests {
+ final class Result: AudioMixerDelegate {
+ var outputs: [AVAudioPCMBuffer] = []
+
+ func audioMixer(_ audioMixer: some AudioMixer, track: UInt8, didInput buffer: AVAudioPCMBuffer, when: AVAudioTime) {
+ }
+
+ func audioMixer(_ audioMixer: some AudioMixer, didOutput audioFormat: AVAudioFormat) {
+ }
+
+ func audioMixer(_ audioMixer: some AudioMixer, didOutput audioBuffer: AVAudioPCMBuffer, when: AVAudioTime) {
+ outputs.append(audioBuffer)
+ }
+
+ func audioMixer(_ audioMixer: some AudioMixer, errorOccurred error: AudioMixerError) {
+ }
+ }
+
+ @Test func keep44100_1ch() {
+ let mixer = AudioMixerBySingleTrack()
+ mixer.settings = .init(
+ sampleRate: 44100, channels: 1
+ )
+ mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!)
+ #expect(mixer.outputFormat?.sampleRate == 44100)
+ mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!)
+ #expect(mixer.outputFormat?.sampleRate == 44100)
+ }
+
+ @Test func test44100to48000_1ch() {
+ let mixer = AudioMixerBySingleTrack()
+ mixer.settings = .init(
+ sampleRate: 44100, channels: 1
+ )
+ mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!)
+ #expect(mixer.outputFormat?.sampleRate == 44100)
+ mixer.settings = .init(
+ sampleRate: 48000, channels: 1
+ )
+ mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!)
+ #expect(mixer.outputFormat?.sampleRate == 48000)
+ }
+
+ @Test func test44100to48000_4ch_2ch() {
+ let result = Result()
+ let mixer = AudioMixerBySingleTrack()
+ mixer.delegate = result
+ mixer.settings = .init(
+ sampleRate: 44100, channels: 0
+ )
+ mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 4)!)
+ #expect(mixer.outputFormat?.channelCount == 2)
+ #expect(mixer.outputFormat?.sampleRate == 44100)
+ mixer.settings = .init(
+ sampleRate: 48000, channels: 0
+ )
+ mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 4)!)
+ mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 4)!)
+ #expect(mixer.outputFormat?.channelCount == 2)
+ #expect(mixer.outputFormat?.sampleRate == 48000)
+ #expect(result.outputs.count == 2)
+ }
+
+ @Test func test44100to48000_4ch() {
+ let result = Result()
+ let mixer = AudioMixerBySingleTrack()
+ mixer.delegate = result
+ mixer.settings = .init(
+ sampleRate: 44100, channels: 0
+ )
+ mixer.settings.maximumNumberOfChannels = 4
+ mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 4)!)
+ #expect(mixer.outputFormat?.channelCount == 4)
+ #expect(mixer.outputFormat?.sampleRate == 44100)
+ mixer.settings = .init(
+ sampleRate: 48000, channels: 0
+ )
+ mixer.settings.maximumNumberOfChannels = 4
+ mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 4)!)
+ mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 4)!)
+ #expect(mixer.outputFormat?.channelCount == 4)
+ #expect(mixer.outputFormat?.sampleRate == 48000)
+ #expect(result.outputs.count == 2)
+ }
+
+ @Test func passthrough16000_48000() {
+ let mixer = AudioMixerBySingleTrack()
+ mixer.settings = .init(
+ sampleRate: 0, channels: 1
+ )
+ mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(16000, numSamples: 1024, channels: 1)!)
+ #expect(mixer.outputFormat?.sampleRate == 16000)
+ mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!)
+ #expect(mixer.outputFormat?.sampleRate == 44100)
+ }
+
+ @Test func inputFormats() {
+ let mixer = AudioMixerBySingleTrack()
+ mixer.settings = .init(
+ sampleRate: 44100, channels: 1
+ )
+ mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!)
+ let inputFormats = mixer.inputFormats
+ #expect(inputFormats[0]?.sampleRate == 48000)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioMixerTrackTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioMixerTrackTests.swift
new file mode 100644
index 000000000..12f28c7ad
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioMixerTrackTests.swift
@@ -0,0 +1,64 @@
+import AVFoundation
+import Foundation
+import Testing
+
+@testable import HaishinKit
+
+@Suite final class AudioMixerTrackTests {
+ @Test func keep16000() {
+ let format = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: 16000, channels: 1, interleaved: true)!
+ let track = AudioMixerTrack(id: 0, outputFormat: format)
+ track.delegate = self
+ track.append(CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!)
+ #expect(track.outputFormat.sampleRate == 16000)
+ track.append(CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!)
+ #expect(track.outputFormat.sampleRate == 16000)
+ }
+
+ @Test func keep44100() {
+ let format = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: 44100, channels: 1, interleaved: true)!
+ let resampler = AudioMixerTrack(id: 0, outputFormat: format)
+ resampler.delegate = self
+ resampler.append(CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!)
+ #expect(resampler.outputFormat.sampleRate == 44100)
+ resampler.append(CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!)
+ #expect(resampler.outputFormat.sampleRate == 44100)
+ resampler.append(CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!)
+ #expect(resampler.outputFormat.sampleRate == 44100)
+ resampler.append(CMAudioSampleBufferFactory.makeSinWave(16000, numSamples: 1024 * 20, channels: 1)!)
+ #expect(resampler.outputFormat.sampleRate == 44100)
+ }
+
+ @Test func keep48000() {
+ let format = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: 48000, channels: 1, interleaved: true)!
+ let track = AudioMixerTrack(id: 0, outputFormat: format)
+ track.delegate = self
+ track.append(CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!)
+ track.append(CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024 * 2, channels: 1)!)
+ }
+
+ @Test func passthrough48000_44100() {
+ let format = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: 44000, channels: 1, interleaved: true)!
+ let resampler = AudioMixerTrack(id: 0, outputFormat: format)
+ resampler.delegate = self
+ resampler.append(CMAudioSampleBufferFactory.makeSinWave(44000, numSamples: 1024, channels: 1)!)
+ resampler.append(CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!)
+ }
+
+ @Test func passthrough16000_48000() {
+ let format = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: 48000, channels: 1, interleaved: true)!
+ let track = AudioMixerTrack(id: 0, outputFormat: format)
+ track.delegate = self
+ track.append(CMAudioSampleBufferFactory.makeSinWave(16000, numSamples: 1024, channels: 1)!)
+ #expect(track.outputFormat.sampleRate == 48000)
+ track.append(CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!)
+ }
+}
+
+extension AudioMixerTrackTests: AudioMixerTrackDelegate {
+ func track(_ track: HaishinKit.AudioMixerTrack, didOutput audioPCMBuffer: AVAudioPCMBuffer, when: AVAudioTime) {
+ }
+
+ func track(_ track: HaishinKit.AudioMixerTrack, errorOccurred error: HaishinKit.AudioMixerError) {
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioRingBufferTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioRingBufferTests.swift
new file mode 100644
index 000000000..245b2e6d2
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioRingBufferTests.swift
@@ -0,0 +1,85 @@
+import AVFoundation
+import Foundation
+import Testing
+
+@testable import HaishinKit
+
+@Suite struct AudioRingBufferTests {
+ @Test func monoAppendSampleBuffer_920() throws {
+ try appendSampleBuffer(920, channels: 1)
+ }
+
+ @Test func monoAppendSampleBuffer_1024() throws {
+ try appendSampleBuffer(1024, channels: 1)
+ }
+
+ @Test func monoAppendSampleBuffer_overrun() throws {
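+ // Appends 4096 samples in one shot, drains four 1024-sample reads, then expects the next read to fail.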
+ let numSamples = 1024 * 4
+ var asbd = AudioStreamBasicDescription(
+ mSampleRate: 44100,
+ mFormatID: kAudioFormatLinearPCM,
+ mFormatFlags: 0xc,
+ mBytesPerPacket: 2,
+ mFramesPerPacket: 1,
+ mBytesPerFrame: 2,
+ mChannelsPerFrame: 1,
+ mBitsPerChannel: 16,
+ mReserved: 0
+ )
+ let format = AVAudioFormat(streamDescription: &asbd)
+ let buffer = AudioRingBuffer(format!, bufferCounts: 3) // 1024 * 3
+ guard
+ let readBuffer = AVAudioPCMBuffer(pcmFormat: AVAudioFormat(streamDescription: &asbd)!, frameCapacity: AVAudioFrameCount(1024)),
+ let sinWave = CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: numSamples, channels: 1) else {
+ return
+ }
+ buffer?.append(sinWave)
+ #expect(buffer?.isDataAvailable(1024) == true)
+ #expect(buffer?.render(UInt32(1024), ioData: readBuffer.mutableAudioBufferList) == noErr)
+ #expect(buffer?.isDataAvailable(1024) == true)
+ #expect(buffer?.render(UInt32(1024), ioData: readBuffer.mutableAudioBufferList) == noErr)
+ #expect(buffer?.isDataAvailable(1024) == true)
+ #expect(buffer?.render(UInt32(1024), ioData: readBuffer.mutableAudioBufferList) == noErr)
+ #expect(buffer?.isDataAvailable(1024) == true)
+ #expect(buffer?.render(UInt32(1024), ioData: readBuffer.mutableAudioBufferList) == noErr)
+ #expect(buffer?.isDataAvailable(1024) == false)
+ #expect(buffer?.render(UInt32(1024), ioData: readBuffer.mutableAudioBufferList) != noErr)
+ }
+
+ @Test func stereoAppendSampleBuffer_920() throws {
+ try appendSampleBuffer(920, channels: 2)
+ }
+
+ @Test func stereoAppendSampleBuffer_1024() throws {
+ try appendSampleBuffer(1024, channels: 2)
+ }
+
+ private func appendSampleBuffer(_ numSamples: Int, channels: UInt32) throws {
+ var asbd = AudioStreamBasicDescription(
+ mSampleRate: 44100,
+ mFormatID: kAudioFormatLinearPCM,
+ mFormatFlags: 0xc,
+ mBytesPerPacket: 2 * channels,
+ mFramesPerPacket: 1,
+ mBytesPerFrame: 2 * channels,
+ mChannelsPerFrame: channels,
+ mBitsPerChannel: 16,
+ mReserved: 0
+ )
+ let format = AVAudioFormat(streamDescription: &asbd)
+ let buffer = AudioRingBuffer(format!, bufferCounts: 3)
+ guard
+ let readBuffer = AVAudioPCMBuffer(pcmFormat: AVAudioFormat(streamDescription: &asbd)!, frameCapacity: AVAudioFrameCount(numSamples)),
+ let sinWave = CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: numSamples, channels: channels) else {
+ return
+ }
+ let bufferList = UnsafeMutableAudioBufferListPointer(readBuffer.mutableAudioBufferList)
+ readBuffer.frameLength = AVAudioFrameCount(numSamples)
+ for _ in 0..<30 {
+ buffer?.append(sinWave)
+ readBuffer.int16ChannelData?[0].update(repeating: 0, count: numSamples)
+ #expect(buffer?.render(UInt32(numSamples), ioData: readBuffer.mutableAudioBufferList) == noErr)
+ #expect(try sinWave.dataBuffer?.dataBytes().bytes == Data(bytes: bufferList[0].mData!, count: numSamples * Int(channels) * 2).bytes)
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/MediaMixerTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/MediaMixerTests.swift
new file mode 100644
index 000000000..b7ba11f2c
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/MediaMixerTests.swift
@@ -0,0 +1,53 @@
+import AVFoundation
+import Foundation
+import Testing
+
+@testable import HaishinKit
+
+@Suite(.disabled(if: TestEnvironment.isCI))
+struct MediaMixerTests {
+ @Test func videoConfiguration() async throws {
+ let mixer = MediaMixer()
+ await #expect(throws: (MediaMixer.Error).self) {
+ try await mixer.configuration(video: 0) { _ in }
+ }
+ try await mixer.attachVideo(AVCaptureDevice.default(for: .video), track: 0) { unit in
+ #expect(throws: (any Error).self) {
+ try unit.setFrameRate(60)
+ }
+ }
+ try await mixer.configuration(video: 0) { _ in }
+ }
+
+ @Test func release() async {
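+ // The mixer must deallocate after stopRunning(); a non-nil weak reference here would indicate a retain cycle.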
+ weak var weakMixer: MediaMixer?
+ _ = await {
+ let mixer = MediaMixer(captureSessionMode: .manual)
+ await mixer.startRunning()
+ try? await Task.sleep(nanoseconds: 1)
+ await mixer.stopRunning()
+ try? await Task.sleep(nanoseconds: 1)
+ weakMixer = mixer
+ }()
+ #expect(weakMixer == nil)
+ }
+
+ @Test func release_with_multimode() async {
+ weak var weakMixer: MediaMixer?
+ _ = await {
+ let mixer = MediaMixer(captureSessionMode: .multi)
+ await mixer.startRunning()
+ try? await Task.sleep(nanoseconds: 1)
+ await mixer.stopRunning()
+ try? await Task.sleep(nanoseconds: 1)
+ weakMixer = mixer
+ }()
+ #expect(weakMixer == nil)
+ }
+
+ @Test func currentFrameRate() async throws {
+ let mixer = MediaMixer()
+ try await mixer.setFrameRate(60)
+ #expect(await mixer.frameRate == 60)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/VideoDeviceUnitTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/VideoDeviceUnitTests.swift
new file mode 100644
index 000000000..7914f4194
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/VideoDeviceUnitTests.swift
@@ -0,0 +1,19 @@
+import AVFoundation
+import Foundation
+import Testing
+
+@testable import HaishinKit
+
+@Suite struct VideoDeviceUnitTests {
+ @Test func release() {
+ weak var weakDevice: VideoDeviceUnit?
+ _ = {
+ guard let videoDevice = AVCaptureDevice.default(for: .video) else {
+ return
+ }
+ let device = try? VideoDeviceUnit(0, device: videoDevice)
+ weakDevice = device
+ }()
+ #expect(weakDevice == nil)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Screen/ScreenObjectContainerTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Screen/ScreenObjectContainerTests.swift
new file mode 100644
index 000000000..3f08df970
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Screen/ScreenObjectContainerTests.swift
@@ -0,0 +1,29 @@
+import AVFoundation
+import Foundation
+import Testing
+
+@testable import HaishinKit
+
+@ScreenActor
+@Suite struct ScreenObjectContainerTests {
+ @Test func lookUpVideoTrackScreenObject() {
+ let container1 = ScreenObjectContainer()
+
+ let videoTrack1 = VideoTrackScreenObject()
+ let videoTrack2 = VideoTrackScreenObject()
+
+ try? container1.addChild(videoTrack1)
+ try? container1.addChild(videoTrack2)
+
+ let videoTracks1 = container1.getScreenObjects() as [VideoTrackScreenObject]
+ #expect(videoTracks1.count == 2)
+
+ let container2 = ScreenObjectContainer()
+ let videoTrack3 = VideoTrackScreenObject()
+ try? container2.addChild(videoTrack3)
+ try? container1.addChild(container2)
+
+ let videoTracks2 = container1.getScreenObjects() as [VideoTrackScreenObject]
+ #expect(videoTracks2.count == 3)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Screen/ScreenObjectTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Screen/ScreenObjectTests.swift
new file mode 100644
index 000000000..b9aa7ee0a
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Screen/ScreenObjectTests.swift
@@ -0,0 +1,96 @@
+import AVFoundation
+import Foundation
+import Testing
+
+@testable import HaishinKit
+
+@ScreenActor
+@Suite struct ScreenObjectTests {
+ @Test func screenHorizontalAlignmentRect() {
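+ // On a 1600×900 render, 100×100 objects land at x = 0 (left), 750 (center) and 1500 (right).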
+ let screen = Screen()
+
+ let object1 = ScreenObject()
+ object1.size = .init(width: 100, height: 100)
+ object1.horizontalAlignment = .left
+
+ let object2 = ScreenObject()
+ object2.size = .init(width: 100, height: 100)
+ object2.horizontalAlignment = .center
+
+ let object3 = ScreenObject()
+ object3.size = .init(width: 100, height: 100)
+ object3.horizontalAlignment = .right
+
+ try? screen.addChild(object1)
+ try? screen.addChild(object2)
+ try? screen.addChild(object3)
+
+ if let sampleBuffer = CMVideoSampleBufferFactory.makeSampleBuffer(width: 1600, height: 900) {
+ _ = screen.render(sampleBuffer)
+ }
+ #expect(object1.bounds == .init(origin: .zero, size: object1.size))
+ #expect(object2.bounds == .init(x: 750, y: 0, width: 100, height: 100))
+ #expect(object3.bounds == .init(x: 1500, y: 0, width: 100, height: 100))
+ }
+
+ @Test func screenVerticalAlignmentRect() {
+ let screen = Screen()
+
+ let object0 = ScreenObject()
+ object0.size = .zero
+ object0.verticalAlignment = .top
+
+ let object1 = ScreenObject()
+ object1.size = .init(width: 100, height: 100)
+ object1.verticalAlignment = .top
+
+ let object2 = ScreenObject()
+ object2.size = .init(width: 100, height: 100)
+ object2.verticalAlignment = .middle
+
+ let object3 = ScreenObject()
+ object3.size = .init(width: 100, height: 100)
+ object3.verticalAlignment = .bottom
+
+ try? screen.addChild(object0)
+ try? screen.addChild(object1)
+ try? screen.addChild(object2)
+ try? screen.addChild(object3)
+
+ if let sampleBuffer = CMVideoSampleBufferFactory.makeSampleBuffer(width: 1600, height: 900) {
+ _ = screen.render(sampleBuffer)
+ }
+ #expect(object0.bounds == .init(x: 0, y: 0, width: 1600, height: 900))
+ #expect(object1.bounds == .init(x: 0, y: 0, width: object1.size.width, height: object1.size.height))
+ #expect(object2.bounds == .init(x: 0, y: 400, width: 100, height: 100))
+ #expect(object3.bounds == .init(x: 0, y: 800, width: 100, height: 100))
+ }
+
+ @Test func screenWithContainerTests() {
+ let screen = Screen()
+
+ let container = ScreenObjectContainer()
+ container.size = .init(width: 200, height: 100)
+ container.layoutMargin = .init(top: 16, left: 16, bottom: 0, right: 0)
+
+ let object0 = ScreenObject()
+ object0.size = .zero
+ object0.verticalAlignment = .top
+
+ let object1 = ScreenObject()
+ object1.size = .init(width: 100, height: 100)
+ object1.layoutMargin = .init(top: 16, left: 16, bottom: 0, right: 0)
+ object1.verticalAlignment = .top
+
+ try? container.addChild(object0)
+ try? container.addChild(object1)
+ try? screen.addChild(container)
+
+ if let sampleBuffer = CMVideoSampleBufferFactory.makeSampleBuffer(width: 1600, height: 900) {
+ _ = screen.render(sampleBuffer)
+ }
+
+ #expect(object0.bounds == .init(x: 16, y: 16, width: 200, height: 100))
+ #expect(object1.bounds == .init(x: 32, y: 32, width: 100, height: 100))
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Screen/VideoTrackScreenObjectTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Screen/VideoTrackScreenObjectTests.swift
new file mode 100644
index 000000000..8460fa056
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Screen/VideoTrackScreenObjectTests.swift
@@ -0,0 +1,42 @@
+import AVFoundation
+import Foundation
+import Testing
+
+@testable import HaishinKit
+
+@ScreenActor
+@Suite struct VideoTrackObjectContainerTests {
+ @Test func horizontalAlignmentBounds() {
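+ // A 900×1600 portrait frame aspect-fitted into a 160×90 slot keeps height 90 and scales width to 90 * 900 / 1600 = 50.625.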
+ let screen = Screen()
+
+ let object1 = VideoTrackScreenObject()
+ object1.videoGravity = .resizeAspect
+ object1.size = .init(width: 160, height: 90)
+ object1.enqueue(CMVideoSampleBufferFactory.makeSampleBuffer(width: 900, height: 1600)!)
+ object1.horizontalAlignment = .left
+
+ let object2 = VideoTrackScreenObject()
+ object2.videoGravity = .resizeAspect
+ object2.size = .init(width: 160, height: 90)
+ object2.enqueue(CMVideoSampleBufferFactory.makeSampleBuffer(width: 900, height: 1600)!)
+ object2.horizontalAlignment = .center
+
+ let object3 = VideoTrackScreenObject()
+ object3.videoGravity = .resizeAspect
+ object3.size = .init(width: 160, height: 90)
+ object3.enqueue(CMVideoSampleBufferFactory.makeSampleBuffer(width: 900, height: 1600)!)
+ object3.horizontalAlignment = .right
+
+ try? screen.addChild(object1)
+ try? screen.addChild(object2)
+ try? screen.addChild(object3)
+
+ if let sampleBuffer = CMVideoSampleBufferFactory.makeSampleBuffer(width: 1600, height: 900) {
+ _ = screen.render(sampleBuffer)
+ }
+
+ #expect(object1.bounds == .init(x: 0, y: 0, width: 50.625, height: 90))
+ #expect(object2.bounds == .init(x: 774.6875, y: 0, width: 50.625, height: 90))
+ #expect(object3.bounds == .init(x: 1549.375, y: 0, width: 50.625, height: 90))
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Stream/StreamRecorderTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Stream/StreamRecorderTests.swift
new file mode 100644
index 000000000..f1054e97a
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Stream/StreamRecorderTests.swift
@@ -0,0 +1,49 @@
+import Foundation
+import Testing
+
+@testable import HaishinKit
+
+@Suite struct StreamRecorderTests {
+ @Test func startRunning_nil() async throws {
+ let recorder = StreamRecorder()
+ try await recorder.startRecording(nil)
+ let moviesDirectory = await recorder.moviesDirectory
+ // $moviesDirectory/B644F60F-0959-4F54-9D14-7F9949E02AD8.mp4
+        #expect((await recorder.outputURL?.path.contains(moviesDirectory.path())) == true)
+ }
+
+ @Test func startRunning_fileName() async throws {
+ let recorder = StreamRecorder()
+ try? await recorder.startRecording(URL(string: "dir/sample.mp4"))
+ _ = await recorder.moviesDirectory
+ // $moviesDirectory/dir/sample.mp4
+        #expect((await recorder.outputURL?.path.contains("dir/sample.mp4")) == true)
+ }
+
+ @Test func startRunning_fullPath() async {
+ let recorder = StreamRecorder()
+ let fullPath = await recorder.moviesDirectory.appendingPathComponent("sample.mp4")
+ // $moviesDirectory/sample.mp4
+ try? await recorder.startRecording(fullPath)
+ #expect(await recorder.outputURL == fullPath)
+ }
+
+ @Test func startRunning_dir() async {
+ let recorder = StreamRecorder()
+ try? await recorder.startRecording(URL(string: "dir"))
+ // $moviesDirectory/dir/33FA7D32-E0A8-4E2C-9980-B54B60654044.mp4
+        #expect((await recorder.outputURL?.path.contains("dir")) == true)
+ }
+
+ @Test func startRunning_fileAlreadyExists() async {
+ let recorder = StreamRecorder()
+ let filePath = await recorder.moviesDirectory.appendingPathComponent("duplicate-file.mp4")
+ FileManager.default.createFile(atPath: filePath.path, contents: nil)
+ do {
+ try await recorder.startRecording(filePath)
+            Issue.record("startRecording should throw when the file already exists")
+ } catch {
+ try? FileManager.default.removeItem(atPath: filePath.path)
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/TestEnvironment.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/TestEnvironment.swift
new file mode 100644
index 000000000..413c05d93
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/TestEnvironment.swift
@@ -0,0 +1,7 @@
+import Foundation
+
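+/// `CI` is the environment variable that GitHub Actions and most other CI
+/// services export as "true" on their runners.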
+enum TestEnvironment {
+ static var isCI: Bool {
+ ProcessInfo.processInfo.environment["CI"] == "true"
+ }
+}
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Util/ByteArrayTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Util/ByteArrayTests.swift
new file mode 100644
index 000000000..2c02973fe
--- /dev/null
+++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Util/ByteArrayTests.swift
@@ -0,0 +1,125 @@
+import Foundation
+import Testing
+
+@testable import HaishinKit
+
+@Suite struct ByteArrayTests {
+ @Test func int8() throws {
+ let bytes = ByteArray()
+ bytes.writeInt8(Int8.min)
+ bytes.writeInt8(0)
+ bytes.writeInt8(Int8.max)
+ #expect(bytes.position == ByteArray.sizeOfInt8 * 3)
+ bytes.position = 0
+ #expect(try bytes.readInt8() == Int8.min)
+        #expect(try bytes.readInt8() == 0)
+ #expect(try bytes.readInt8() == Int8.max)
+ }
+
+ @Test func uint8() throws {
+ let bytes = ByteArray()
+ bytes.writeUInt8(UInt8.min)
+ bytes.writeUInt8(0)
+ bytes.writeUInt8(UInt8.max)
+ #expect(bytes.position == ByteArray.sizeOfInt8 * 3)
+ bytes.position = 0
+ #expect(try bytes.readUInt8() == UInt8.min)
+ #expect(try bytes.readUInt8() == 0)
+ #expect(try bytes.readUInt8() == UInt8.max)
+ }
+
+ @Test func int16() throws {
+ let bytes = ByteArray()
+ bytes.writeInt16(Int16.min)
+ bytes.writeInt16(0)
+ bytes.writeInt16(Int16.max)
+ bytes.position = 0
+ #expect(try bytes.readInt16() == Int16.min)
+ #expect(try bytes.readInt16() == 0)
+ #expect(try bytes.readInt16() == Int16.max)
+ }
+
+ @Test func uint16() throws {
+ let bytes = ByteArray()
+ bytes.writeUInt16(UInt16.min)
+ bytes.writeUInt16(0)
+ bytes.writeUInt16(UInt16.max)
+ bytes.position = 0
+ #expect(try bytes.readUInt16() == UInt16.min)
+ #expect(try bytes.readUInt16() == 0)
+ #expect(try bytes.readUInt16() == UInt16.max)
+ }
+
+ @Test func uint24() throws {
+ let bytes = ByteArray()
+ bytes.writeUInt24(0xFFFFFF)
+ bytes.position = 0
+ #expect(try bytes.readUInt24() == 0xFFFFFF)
+ }
+
+ @Test func uint32() throws {
+ let bytes = ByteArray()
+ bytes.writeUInt32(UInt32.min)
+ bytes.writeUInt32(0)
+ bytes.writeUInt32(UInt32.max)
+ bytes.position = 0
+ #expect(try bytes.readUInt32() == UInt32.min)
+ #expect(try bytes.readUInt32() == 0)
+ #expect(try bytes.readUInt32() == UInt32.max)
+ }
+
+ @Test func int32() throws {
+ let bytes = ByteArray()
+ bytes.writeInt32(Int32.min)
+ bytes.writeInt32(0)
+ bytes.writeInt32(Int32.max)
+ bytes.position = 0
+ #expect(try bytes.readInt32() == Int32.min)
+ #expect(try bytes.readInt32() == 0)
+ #expect(try bytes.readInt32() == Int32.max)
+ }
+
+ @Test func float() throws {
+ let bytes = ByteArray()
+ bytes.writeFloat(Float.infinity)
+ #expect(bytes.position == ByteArray.sizeOfFloat)
+ bytes.position = 0
+ #expect(try bytes.readFloat() == Float.infinity)
+ }
+
+ @Test func double() throws {
+ let bytes = ByteArray()
+ bytes.writeDouble(.pi)
+ #expect(bytes.position == ByteArray.sizeOfDouble)
+ bytes.position = 0
+ #expect(try bytes.readDouble() == Double.pi)
+ bytes.clear()
+ bytes.writeDouble(Double.infinity)
+ bytes.position = 0
+ #expect(try bytes.readDouble() == Double.infinity)
+ }
+
+ @Test func utf8() throws {
+ let bytes = ByteArray()
+ do {
+ try bytes.writeUTF8("hello world!!")
+ } catch {
+ Issue.record()
+ }
+
+ let length: Int = bytes.position
+ bytes.position = 0
+ #expect(try bytes.readUTF8() == "hello world!!")
+ bytes.position = 0
+
+ var raiseError = false
+ do {
+ let _: String = try bytes.readUTF8Bytes(length + 10)
+ } catch {
+ raiseError = true
+ }
+
+ #expect(raiseError)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/LICENSE.md b/Vendor/HaishinKit.swift/LICENSE.md
new file mode 100644
index 000000000..575a2ee36
--- /dev/null
+++ b/Vendor/HaishinKit.swift/LICENSE.md
@@ -0,0 +1,29 @@
+BSD 3-Clause License
+
+Copyright (c) 2015, shogo4405
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+* Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/Vendor/HaishinKit.swift/MoQTHaishinKit/MoQTHaishinKit.h b/Vendor/HaishinKit.swift/MoQTHaishinKit/MoQTHaishinKit.h
new file mode 100644
index 000000000..10e9d38c1
--- /dev/null
+++ b/Vendor/HaishinKit.swift/MoQTHaishinKit/MoQTHaishinKit.h
@@ -0,0 +1,3 @@
+#import <Foundation/Foundation.h>
+FOUNDATION_EXPORT double MoQTHaishinKitVersionNumber;
+FOUNDATION_EXPORT const unsigned char MoQTHaishinKitVersionString[];
diff --git a/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/Constants.swift b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/Constants.swift
new file mode 100644
index 000000000..c1ce97a91
--- /dev/null
+++ b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/Constants.swift
@@ -0,0 +1,3 @@
+import Logboard
+
+nonisolated(unsafe) let logger = LBLogger.with("com.haishinkit.MoQTHaishinKit")
diff --git a/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/ControlMessage/MoQTAnnounce.swift b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/ControlMessage/MoQTAnnounce.swift
new file mode 100644
index 000000000..726036329
--- /dev/null
+++ b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/ControlMessage/MoQTAnnounce.swift
@@ -0,0 +1,28 @@
+import Foundation
+import Logboard
+
+/// 6.21. ANNOUNCE
+public struct MoQTAnnounce: MoQTControlMessage {
+ public let type = MoQTMessageType.announce
+ public let trackNamespace: [String]
+ public let subscribeParameters: [MoQTVersionSpecificParameter]
+
+ public var payload: Data {
+ get throws {
+ var payload = MoQTPayload()
+ payload.putInt(trackNamespace.count)
+ for namespace in trackNamespace {
+ payload.putString(namespace)
+ }
+ payload.putInt(subscribeParameters.count)
+ for parameter in subscribeParameters {
+ do {
+ payload.putData(try parameter.payload)
+ } catch {
+ logger.info(error)
+ }
+ }
+ return payload.data
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/ControlMessage/MoQTAnnounceError.swift b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/ControlMessage/MoQTAnnounceError.swift
new file mode 100644
index 000000000..db1db97d3
--- /dev/null
+++ b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/ControlMessage/MoQTAnnounceError.swift
@@ -0,0 +1,29 @@
+import Foundation
+import Logboard
+
+/// ANNOUNCE_ERROR
+public struct MoQTAnnounceError: MoQTControlMessage, Swift.Error {
+ public let type = MoQTMessageType.announceError
+ public let trackNamespace: [String]
+ public let code: Int
+ public let reasonPhrase: String
+
+ public var payload: Data {
+ get throws {
+ throw MoQTControlMessageError.notImplemented
+ }
+ }
+}
+
+extension MoQTAnnounceError {
+ init(_ payload: inout MoQTPayload) throws {
+ let trackNamespaceCounts = try payload.getInt()
+ var trackNamespace: [String] = .init()
+        for _ in 0..<trackNamespaceCounts {
+            trackNamespace.append(try payload.getString())
+        }
+        self.trackNamespace = trackNamespace
+        code = try payload.getInt()
+        reasonPhrase = try payload.getString()
+    }
+}
diff --git a/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/ControlMessage/MoQTControlMessage.swift b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/ControlMessage/MoQTControlMessage.swift
new file mode 100644
--- /dev/null
+++ b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/ControlMessage/MoQTControlMessage.swift
+import Foundation
+
+extension MoQTMessageType {
+    func makeMessage(_ payload: inout MoQTPayload) throws -> (any MoQTControlMessage)? {
+ switch self {
+ case .subscribeUpdate:
+ return nil
+ case .subscribe:
+ return try MoQTSubscribe(&payload)
+ case .subscribeOk:
+ return try MoQTSubscribeOk(&payload)
+ case .subscribeError:
+ return try MoQTSubscribeError(&payload)
+ case .announce:
+ return nil
+ case .announceOk:
+ return try MoQTAnnounceOk(&payload)
+ case .announceError:
+ return try MoQTAnnounceError(&payload)
+ case .unannounce:
+ return nil
+ case .unsubscribe:
+ return nil
+ case .subscribeDone:
+ return nil
+ case .announceCancel:
+ return nil
+ case .trackStatusRequest:
+ return nil
+ case .trackStatus:
+ return nil
+ case .goaway:
+ return nil
+ case .subscribeAnnounuces:
+ return nil
+ case .subscribeAnnounucesOk:
+ return try MoQTSubscribeAnnouncesOk(&payload)
+ case .subscribeAnnounucesError:
+ return try MoQTSubscribeAnnouncesError(&payload)
+ case .clientSetup:
+ return nil
+ case .serverSetup:
+ return try MoQTServerSetup(&payload)
+ }
+ }
+}
+
+enum MoQTControlMessageError: Swift.Error {
+ case notImplemented
+}
+
+public protocol MoQTControlMessage: Sendable {
+ var type: MoQTMessageType { get }
+ var payload: Data { get throws }
+}
diff --git a/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/ControlMessage/MoQTGoaway.swift b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/ControlMessage/MoQTGoaway.swift
new file mode 100644
index 000000000..f615c22a2
--- /dev/null
+++ b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/ControlMessage/MoQTGoaway.swift
@@ -0,0 +1,20 @@
+import Foundation
+
+public struct MoQTGoaway: MoQTControlMessage {
+ public let type: MoQTMessageType = .goaway
+ public let newSessionURI: String
+
+ public var payload: Data {
+ get throws {
+ var payload = MoQTPayload()
+ payload.putString(newSessionURI)
+ return payload.data
+ }
+ }
+}
+
+extension MoQTGoaway {
+ init(_ payload: inout MoQTPayload) throws {
+ newSessionURI = try payload.getString()
+ }
+}
diff --git a/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/ControlMessage/MoQTServerSetup.swift b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/ControlMessage/MoQTServerSetup.swift
new file mode 100644
index 000000000..8aa84853e
--- /dev/null
+++ b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/ControlMessage/MoQTServerSetup.swift
@@ -0,0 +1,29 @@
+import Foundation
+
+public struct MoQTServerSetup: MoQTControlMessage {
+ public let type: MoQTMessageType = .serverSetup
+ public let selectedVersion: Int
+ public let setupParameters: [MoQTSetupParameter]
+
+ public var payload: Data {
+ get throws {
+ throw MoQTControlMessageError.notImplemented
+ }
+ }
+}
+
+extension MoQTServerSetup {
+ init(_ payload: inout MoQTPayload) throws {
+ selectedVersion = try payload.getInt()
+ let setupParametersCounts = try payload.getInt()
+ var setupParameters: [MoQTSetupParameter] = .init()
+        for _ in 0..<setupParametersCounts {
+            setupParameters.append(try .init(&payload))
+        }
+        self.setupParameters = setupParameters
+    }
+}
diff --git a/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/Extension/ExpressibleByIntegerLiteral+Extension.swift b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/Extension/ExpressibleByIntegerLiteral+Extension.swift
new file mode 100644
--- /dev/null
+++ b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/Extension/ExpressibleByIntegerLiteral+Extension.swift
+import Foundation
+
+extension ExpressibleByIntegerLiteral {
+    var data: Data {
+        var value: Self = self
+        return withUnsafePointer(to: &value) {
+            Data(bytes: $0, count: MemoryLayout<Self>.size)
+ }
+ }
+
+ init(data: Data) {
+        let diff: Int = MemoryLayout<Self>.size - data.count
+ if 0 < diff {
+ var buffer = Data(repeating: 0, count: diff)
+ buffer.append(data)
+ self = buffer.withUnsafeBytes { $0.baseAddress!.assumingMemoryBound(to: Self.self).pointee }
+ return
+ }
+ self = data.withUnsafeBytes { $0.baseAddress!.assumingMemoryBound(to: Self.self).pointee }
+ }
+
+    init(data: Slice<Data>) {
+ self.init(data: Data(data))
+ }
+}
diff --git a/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/Extension/NWProtocolQUIC.Options+Extension.swift b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/Extension/NWProtocolQUIC.Options+Extension.swift
new file mode 100644
index 000000000..0abfc0b39
--- /dev/null
+++ b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/Extension/NWProtocolQUIC.Options+Extension.swift
@@ -0,0 +1,12 @@
+import Network
+
+@available(iOS 16.0, macOS 13.0, tvOS 16.0, *)
+extension NWProtocolQUIC.Options {
+ func verifySelfCert() -> NWProtocolQUIC.Options {
+ let securityProtocolOptions: sec_protocol_options_t = self.securityProtocolOptions
+ sec_protocol_options_set_verify_block(securityProtocolOptions, { (_: sec_protocol_metadata_t, _: sec_trust_t, complete: @escaping sec_protocol_verify_complete_t) in
+ complete(true)
+ }, .main)
+ return self
+ }
+}
diff --git a/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/MoQTConnection.swift b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/MoQTConnection.swift
new file mode 100644
index 000000000..dab7a1b9a
--- /dev/null
+++ b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/MoQTConnection.swift
@@ -0,0 +1,240 @@
+import Foundation
+
+@available(iOS 16.0, macOS 13.0, tvOS 16.0, *)
+public actor MoQTConnection {
+ public static let defaultPort = 4433
+ /// The supported protocols are moqt.
+ public static let supportedProtocols = ["moqt"]
+ /// The supported protocol versions.
+ public static let supportedVersions: [MoQTVersion] = [.draft07Exp2]
+ /// The default a control request time out value (ms).
+    /// The default control request timeout value (ms).
+
+ /// The error domain code.
+ public enum Error: Swift.Error {
+        /// An invalid internal state.
+ case invalidState
+ /// The command isn’t supported.
+ case unsupportedCommand(_ command: String)
+ /// The connected operation timed out.
+ case connectionTimedOut
+ /// The general socket error.
+ case socketErrorOccurred(_ error: any Swift.Error)
+ /// The requested operation timed out.
+ case requestTimedOut
+ case unknownResponse
+ }
+
+ public let role: MoQTSetupRole
+    /// The control message request timeout value in milliseconds. Default value is 3000 ms.
+ public let requestTimeout: UInt64
+
+    public var objectStream: AsyncStream<MoQTObject> {
+ AsyncStream { continuation in
+ self.objectStreamContinuation = continuation
+ }
+ }
+
+ private var socket: MoQTSocket?
+ private var inputBuffer = MoQTPayload()
+ private var outputBuffer = MoQTPayload()
+ private var datagramBuffer = MoQTPayload()
+    private var continuation: CheckedContinuation<any MoQTControlMessage, any Swift.Error>?
+ private var currentTrackAlias = 0
+ private var currentSubscribeId = 0
+    private var objectStreamContinuation: AsyncStream<MoQTObject>.Continuation?
+
+ /// Creates a new connection.
+ public init(_ role: MoQTSetupRole, requestTimeOut: UInt64 = MoQTConnection.defaultRequestTimeout) {
+        self.role = role
+ self.requestTimeout = requestTimeOut
+ }
+
+ /// Creates a two-way connection to an application on MoQT Server.
+ public func connect(_ uri: String) async throws -> MoQTServerSetup {
+ guard let uri = URL(string: uri), let scheme = uri.scheme, let host = uri.host, Self.supportedProtocols.contains(scheme) else {
+ throw Error.unsupportedCommand(uri)
+ }
+ socket = .init()
+ guard let socket else {
+ throw Error.invalidState
+ }
+ do {
+ try await socket.connect(host, port: uri.port ?? Self.defaultPort)
+ Task {
+ for await data in await socket.incoming {
+ await didReceiveControlMessage(data)
+ }
+ }
+ Task {
+ for await data in await socket.datagram {
+ await didReceiveDataStream(data)
+ }
+ }
+ guard let serverSetup = try await send(MoQTClientSetup(supportedVersions: Self.supportedVersions, role: role, path: uri.path())) as? MoQTServerSetup else {
+ throw Error.unknownResponse
+ }
+ return serverSetup
+ } catch {
+ logger.error(error)
+ throw error
+ }
+ }
+
+    public func annouce(_ namespace: [String], authInfo: String?) async throws -> Result<MoQTAnnounceOk, MoQTAnnounceError> {
+ var subscribeParameters: [MoQTVersionSpecificParameter] = .init()
+ if let authInfo {
+ subscribeParameters.append(.init(key: .authorizationInfo, value: authInfo))
+ }
+ let message = MoQTAnnounce(trackNamespace: namespace, subscribeParameters: subscribeParameters)
+ switch try await send(message) {
+ case let result as MoQTAnnounceOk:
+ return .success(result)
+ case let result as MoQTAnnounceError:
+ return .failure(result)
+ default:
+ throw Error.unknownResponse
+ }
+ }
+
+    public func subscribe(_ namespace: [String], name: String, authInfo: String? = nil) async throws -> Result<MoQTSubscribeOk, MoQTSubscribeError> {
+ defer {
+ currentTrackAlias += 1
+ currentSubscribeId += 1
+ }
+ var subscribeParameters: [MoQTVersionSpecificParameter] = .init()
+ if let authInfo {
+ subscribeParameters.append(.init(key: .authorizationInfo, value: authInfo))
+ }
+ let message = MoQTSubscribe(
+ subscribeId: currentSubscribeId,
+ trackAlias: currentTrackAlias,
+ trackNamespace: namespace,
+ trackName: name,
+ subscribePriority: 0,
+ groupOrder: .descending,
+ filterType: .latestGroup,
+ startGroup: nil,
+ startObject: nil,
+ endGroup: nil,
+ endObject: nil,
+ subscribeParameters: subscribeParameters
+ )
+ switch try await send(message) {
+ case let result as MoQTSubscribeOk:
+ return .success(result)
+ case let result as MoQTSubscribeError:
+ return .failure(result)
+ default:
+ throw Error.unknownResponse
+ }
+ }
+
+    public func subscribeAnnouces(_ namespace: [String], authInfo: String? = nil) async throws -> Result<MoQTSubscribeAnnouncesOk, MoQTSubscribeAnnouncesError> {
+ var subscribeParameters: [MoQTVersionSpecificParameter] = .init()
+ if let authInfo {
+ subscribeParameters.append(.init(key: .authorizationInfo, value: authInfo))
+ }
+ let message = MoQTSubscribeAnnounces(
+ trackNamespacePrefix: namespace,
+ parameters: subscribeParameters
+ )
+ switch try await send(message) {
+ case let result as MoQTSubscribeAnnouncesOk:
+ return .success(result)
+ case let result as MoQTSubscribeAnnouncesError:
+ return .failure(result)
+ default:
+ throw Error.unknownResponse
+ }
+ }
+
+ /// Closes the connection from the server.
+ public func close() async {
+ await socket?.close()
+ }
+
+ public func send(_ objects: [MoQTObject], header: MoQTStreamHeaderSubgroup) async throws {
+ var buffer = MoQTPayload()
+ buffer.putData(try header.payload)
+ for object in objects {
+ buffer.putData(try object.payload)
+ }
+ buffer.position = 0
+ await socket?.sendDatagram(buffer.data)
+ }
+
+ private func send(_ message: some MoQTControlMessage) async throws -> any MoQTControlMessage {
+ let content = try message.payload
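+        // Control messages are framed as (varint type, varint length, payload) before being sent.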
+ outputBuffer.position = 0
+ outputBuffer.putInt(message.type.rawValue)
+ outputBuffer.putInt(content.count)
+ outputBuffer.putData(content)
+        return try await withCheckedThrowingContinuation { continuation in
+            self.continuation = continuation
+ Task {
+ try? await Task.sleep(nanoseconds: requestTimeout * 1_000_000)
+ self.continuation.map {
+ $0.resume(throwing: Error.requestTimedOut)
+ }
+ self.continuation = nil
+ }
+ Task {
+ await socket?.send(outputBuffer.data)
+ }
+ }
+ }
+
+ private func didReceiveControlMessage(_ data: Data) async {
+ do {
+ inputBuffer.position = 0
+ inputBuffer.putData(data)
+ inputBuffer.position = 0
+ let type = try inputBuffer.getInt()
+ let length = try inputBuffer.getInt()
+ guard let message = try MoQTMessageType(rawValue: type)?.makeMessage(&inputBuffer) else {
+ _ = try? inputBuffer.getData(length)
+ continuation?.resume(throwing: MoQTControlMessageError.notImplemented)
+ continuation = nil
+ return
+ }
+ switch message {
+ case let message as MoQTSubscribe:
+ let ok = MoQTSubscribeOk(
+ subscribeId: currentSubscribeId,
+ expires: 0,
+ groupOrder: message.groupOrder,
+ contentExists: true,
+ largestGroupId: 0,
+ largestObjectId: 0,
+ subscribeParameters: message.subscribeParameters)
+ _ = try? await send(ok)
+ default:
+ continuation?.resume(returning: message)
+ continuation = nil
+ }
+ } catch {
+ logger.warn(error, data.bytes)
+ }
+ }
+
+ private func didReceiveDataStream(_ data: Data) async {
+ do {
+ datagramBuffer.position = 0
+ datagramBuffer.putData(data)
+ datagramBuffer.position = 0
+ let type = try datagramBuffer.getInt()
+ switch MoQTDataStreamType(rawValue: type) {
+ case .streamHeaderSubgroup:
+ _ = try MoQTStreamHeaderSubgroup(&datagramBuffer)
+ while 0 < datagramBuffer.bytesAvailable {
+ objectStreamContinuation?.yield(try .init(&datagramBuffer))
+ }
+ default:
+ break
+ }
+ } catch {
+ logger.warn(error)
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/MoQTPayload.swift b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/MoQTPayload.swift
new file mode 100644
index 000000000..5e0f817d5
--- /dev/null
+++ b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/MoQTPayload.swift
@@ -0,0 +1,133 @@
+import Foundation
+
+struct MoQTPayload {
+ private(set) var data = Data()
+
+ enum Error: Swift.Error {
+ case eof
+ case outOfRange
+ }
+
+ /// Specifies the length of buffer.
+ var length: Int {
+ get {
+ data.count
+ }
+ set {
+ switch true {
+ case (data.count < newValue):
+ data.append(Data(count: newValue - data.count))
+ case (newValue < data.count):
+                data = data.subdata(in: 0..<newValue)
+            default:
+                break
+            }
+        }
+    }
+
+    /// Specifies the current position of the buffer.
+    var position = 0
+
+    /// The number of bytes available for reading.
+    var bytesAvailable: Int {
+        data.count - position
+    }
+
+    @discardableResult
+    mutating func putInt(_ value: Int) -> Self {
+ if value <= 63 {
+ return putData(UInt8(value).bigEndian.data)
+ }
+ if value <= 16383 {
+ return putData((UInt16(value) | 0x4000).bigEndian.data)
+ }
+ if value <= 1073741823 {
+ return putData((UInt32(value) | 0x80000000).bigEndian.data)
+ }
+ return putData((UInt64(value) | 0xc000000000000000).bigEndian.data)
+ }
+
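+    /// Reads a QUIC-style variable-length integer: the two most significant
+    /// bits of the first byte select a 1-, 2-, 4- or 8-byte big-endian
+    /// encoding, mirroring the thresholds `putInt` writes above.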
+ mutating func getInt() throws -> Int {
+ guard 1 <= bytesAvailable else {
+ throw Error.eof
+ }
+ switch Int(data[position] >> 6) {
+ case 0:
+ defer {
+ position += 1
+ }
+            return Int(data: data[position..<position + 1])
+        case 1:
+            defer {
+                position += 2
+            }
+            return Int(UInt16(data: data[position..<position + 2]).bigEndian & 0x3fff)
+        case 2:
+            defer {
+                position += 4
+            }
+            return Int(UInt32(data: data[position..<position + 4]).bigEndian & 0x3fffffff)
+        case 3:
+            defer {
+                position += 8
+            }
+            return Int(UInt64(data: data[position..<position + 8]).bigEndian & 0x3fffffffffffffff)
+        default:
+            throw Error.outOfRange
+        }
+    }
+
+    mutating func putString(_ value: String) -> Self {
+ putInt(value.utf8.count)
+ putData(Data(value.utf8))
+ return self
+ }
+
+ mutating func getString() throws -> String {
+ let length = try getInt()
+ let data = try getData(length)
+ return String(data: data, encoding: .utf8) ?? ""
+ }
+
+ mutating func putBool(_ value: Bool) -> Self {
+ putData(Data([value ? 1 : 0]))
+ return self
+ }
+
+ mutating func getBool() throws -> Bool {
+ guard 1 <= bytesAvailable else {
+ throw Error.eof
+ }
+ let value = try getData(1)
+ return value[0] == 1
+ }
+
+ @discardableResult
+ mutating func putData(_ value: Data) -> Self {
+ if position == data.count {
+ data.append(value)
+ position = data.count
+ return self
+ }
+ let length = min(data.count - position, value.count)
+        data.replaceSubrange(position..<position + length, with: value.subdata(in: 0..<length))
+        position += length
+        if length < value.count {
+            data.append(value.subdata(in: length..<value.count))
+            position = data.count
+        }
+        return self
+    }
+
+    mutating func getData(_ length: Int) throws -> Data {
+ guard length <= bytesAvailable else {
+ throw Error.eof
+ }
+ position += length
+        return data.subdata(in: position - length..<position)
+    }
+}
diff --git a/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/MoQTSocket.swift b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/MoQTSocket.swift
new file mode 100644
--- /dev/null
+++ b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/MoQTSocket.swift
+import Foundation
+import HaishinKit
+import Network
+
+@available(iOS 16.0, macOS 13.0, tvOS 16.0, *)
+actor MoQTSocket {
+    static let defaultWindowSizeC = 1024 * 4
+    // ALPN identifier for MoQT; the exact draft token here is an assumption.
+    static let alpn = ["moq-00"]
+
+    enum Error: Swift.Error {
+        case invalidState
+        case connectionTimedOut
+        case connectionNotEstablished(_ error: NWError?)
+    }
+
+    var incoming: AsyncStream<Data> {
+ AsyncStream { continuation in
+ self.incomingContinuation = continuation
+ }
+ }
+
+    var datagram: AsyncStream<Data> {
+ AsyncStream { continuation in
+ self.datagramContinuation = continuation
+ }
+ }
+
+ private var timeout: UInt64 = 15
+ private var connected = false
+ private var windowSizeC = MoQTSocket.defaultWindowSizeC
+ private var totalBytesIn = 0
+ private var queueBytesOut = 0
+ private var totalBytesOut = 0
+ private var connection: NWConnection? {
+ didSet {
+ connection?.stateUpdateHandler = { state in
+ Task { await self.stateDidChange(to: state) }
+ }
+ connection?.viabilityUpdateHandler = { viability in
+ Task { await self.viabilityDidChange(to: viability) }
+ }
+ }
+ }
+ private var options: NWProtocolQUIC.Options = .init()
+    private var outputs: AsyncStream<Data>.Continuation?
+ private var connectionGroup: NWConnectionGroup? {
+ didSet {
+ connectionGroup?.newConnectionHandler = { connection in
+ Task { await self.newConnection(connection) }
+ }
+ oldValue?.newConnectionHandler = nil
+ oldValue?.stateUpdateHandler = nil
+ }
+ }
+    private var continuation: CheckedContinuation<Void, any Swift.Error>?
+ private var qualityOfService: DispatchQoS = .userInitiated
+    private var incomingContinuation: AsyncStream<Data>.Continuation? {
+ didSet {
+ if let connection, let incomingContinuation {
+ receive(on: connection, continuation: incomingContinuation)
+ }
+ }
+ }
+    private var datagramContinuation: AsyncStream<Data>.Continuation?
+ private lazy var networkQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.MoQSocket.network", qos: qualityOfService)
+
+ func connect(_ name: String, port: Int) async throws {
+ guard !connected else {
+ throw Error.invalidState
+ }
+ totalBytesIn = 0
+ totalBytesOut = 0
+ queueBytesOut = 0
+ do {
+ let options = NWProtocolQUIC.Options(alpn: Self.alpn).verifySelfCert()
+ let endpoint = NWEndpoint.hostPort(host: .init(name), port: .init(integerLiteral: NWEndpoint.Port.IntegerLiteralType(port)))
+ connection = NWConnection(to: endpoint, using: NWParameters(quic: options))
+ options.isDatagram = true
+ connectionGroup = NWConnectionGroup(with: NWMultiplexGroup(to: endpoint), using: NWParameters(quic: options))
+            try await withCheckedThrowingContinuation { (checkedContinuation: CheckedContinuation<Void, any Swift.Error>) in
+ self.continuation = checkedContinuation
+ Task {
+ try? await Task.sleep(nanoseconds: timeout * 1_000_000_000)
+ guard let continuation else {
+ return
+ }
+ continuation.resume(throwing: Error.connectionTimedOut)
+ self.continuation = nil
+ close()
+ }
+ connection?.start(queue: networkQueue)
+ }
+ } catch {
+ throw error
+ }
+ }
+
+ func send(_ data: Data) {
+ guard connected else {
+ return
+ }
+ queueBytesOut += data.count
+ outputs?.yield(data)
+ }
+
+ func sendDatagram(_ data: Data) {
+ connectionGroup?.send(content: data) { _ in
+ }
+ }
+
+ func close(_ error: NWError? = nil) {
+ guard connection != nil else {
+ return
+ }
+ if let continuation {
+ continuation.resume(throwing: Error.connectionNotEstablished(error))
+ self.continuation = nil
+ }
+ connected = false
+ outputs = nil
+ connection = nil
+ continuation = nil
+ }
+
+ private func newConnection(_ connection: NWConnection) {
+ receive(on: connection, continuation: datagramContinuation)
+ connection.start(queue: networkQueue)
+ }
+
+    private nonisolated func receive(on connection: NWConnection, continuation: AsyncStream<Data>.Continuation?) {
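+        // Re-arms itself after every delivered chunk; the loop stops once the
+        // connection no longer produces content.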
+ connection.receive(minimumIncompleteLength: 0, maximumLength: 65558) { content, _, _, _ in
+ if let content {
+ continuation?.yield(content)
+ self.receive(on: connection, continuation: continuation)
+ }
+ }
+ }
+
+ private func stateDidChange(to state: NWConnection.State) {
+ switch state {
+ case .ready:
+ logger.info("Connection is ready.")
+ connected = true
+            let (stream, continuation) = AsyncStream<Data>.makeStream()
+ Task {
+ for await data in stream where connected {
+ try await send(data)
+ totalBytesOut += data.count
+ queueBytesOut -= data.count
+ }
+ }
+ self.outputs = continuation
+ self.connectionGroup?.start(queue: networkQueue)
+ self.continuation?.resume()
+ self.continuation = nil
+ case .waiting(let error):
+ logger.warn("Connection waiting:", error)
+ close(error)
+ case .setup:
+ logger.debug("Connection is setting up.")
+ case .preparing:
+ logger.debug("Connection is preparing.")
+ case .failed(let error):
+ logger.warn("Connection failed:", error)
+ close(error)
+ case .cancelled:
+ logger.info("Connection cancelled.")
+ close()
+ @unknown default:
+ logger.error("Unknown connection state.")
+ }
+ }
+
+ private func viabilityDidChange(to viability: Bool) {
+ logger.info("Connection viability changed to ", viability)
+ if viability == false {
+ close()
+ }
+ }
+
+ private func send(_ data: Data) async throws {
+ return try await withCheckedThrowingContinuation { continuation in
+ guard let connection else {
+ continuation.resume(throwing: Error.invalidState)
+ return
+ }
+ connection.send(content: data, completion: .contentProcessed { error in
+ if let error {
+ continuation.resume(throwing: error)
+ return
+ }
+ continuation.resume()
+ })
+ }
+ }
+}
+
+@available(iOS 16.0, macOS 13.0, tvOS 16.0, *)
+extension MoQTSocket: NetworkTransportReporter {
+ // MARK: NetworkTransportReporter
+ func makeNetworkMonitor() async -> NetworkMonitor {
+ return .init(self)
+ }
+
+ func makeNetworkTransportReport() -> NetworkTransportReport {
+ return .init(queueBytesOut: queueBytesOut, totalBytesIn: totalBytesIn, totalBytesOut: totalBytesOut)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/MoQTVersion.swift b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/MoQTVersion.swift
new file mode 100644
index 000000000..6deb2bad1
--- /dev/null
+++ b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/MoQTVersion.swift
@@ -0,0 +1,12 @@
+public enum MoQTVersion: Int, Sendable {
+ case draft01 = 0xff000001
+ case draft02 = 0xff000002
+ case draft03 = 0xff000003
+ case draft04 = 0xff000004
+ case draft05 = 0xff000005
+ case draft06 = 0xff000006
+ case draft07 = 0xff000007
+
+ case draft07Exp = 0xff070001
+ case draft07Exp2 = 0xff070002
+}
diff --git a/Vendor/HaishinKit.swift/MoQTHaishinKit/Tests/MoQTPayLoadTests.swift b/Vendor/HaishinKit.swift/MoQTHaishinKit/Tests/MoQTPayLoadTests.swift
new file mode 100644
index 000000000..6b97cac7c
--- /dev/null
+++ b/Vendor/HaishinKit.swift/MoQTHaishinKit/Tests/MoQTPayLoadTests.swift
@@ -0,0 +1,14 @@
+import Foundation
+@testable import MoQTHaishinKit
+import Testing
+
+@Suite struct MoQTPayLoadTests {
+ @Test func putInt() throws {
+ var payload = MoQTPayload()
+ payload.putInt(MoQTVersion.draft04.rawValue)
+ #expect(payload.data == Data([192, 0, 0, 0, 255, 0, 0, 4]))
+ payload.position = 0
+ #expect(try payload.getInt() == MoQTVersion.draft04.rawValue)
+ #expect(payload.position == 8)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/MoQTHaishinKit/Tests/MoQTStreamHeaderSubgroupTests.swift b/Vendor/HaishinKit.swift/MoQTHaishinKit/Tests/MoQTStreamHeaderSubgroupTests.swift
new file mode 100644
index 000000000..2718184bd
--- /dev/null
+++ b/Vendor/HaishinKit.swift/MoQTHaishinKit/Tests/MoQTStreamHeaderSubgroupTests.swift
@@ -0,0 +1,19 @@
+import Foundation
+@testable import MoQTHaishinKit
+import Testing
+
+@Suite struct MoQTStreamHeaderSubgroupTests {
+ @Test func parse() throws {
+ var payload = MoQTPayload()
+ payload.putData(Data([4, 64, 99, 0, 129, 184, 103, 39, 0, 0, 0, 17, 50, 48, 50, 52, 45, 49, 49, 45, 49, 54, 32, 49, 52, 58, 50, 55, 58, 1, 1, 48, 2, 1, 49, 3, 1, 50, 4, 1, 51, 5, 1, 52, 6, 1, 53, 7, 1, 54, 8, 1, 55, 9, 1, 56, 10, 1, 57, 11, 2, 49, 48, 12, 2, 49, 49, 13, 2, 49, 50, 14, 2, 49, 51, 15, 2, 49, 52, 16, 2, 49, 53, 17, 2, 49, 54, 18, 2, 49, 55, 19, 2, 49, 56, 20, 2, 49, 57, 21, 2, 50, 48, 22, 2, 50, 49, 23, 2, 50, 50, 24, 2, 50, 51, 25, 2, 50, 52, 26, 2, 50, 53, 27, 2, 50, 54, 28, 2, 50, 55, 29, 2, 50, 56, 30, 2, 50, 57, 31, 2, 51, 48, 32, 2, 51, 49, 33, 2, 51, 50, 34, 2, 51, 51, 35, 2, 51, 52, 36, 2, 51, 53, 37, 2, 51, 54, 38, 2, 51, 55, 39, 2, 51, 56, 40, 2, 51, 57, 41, 2, 52, 48, 42, 2, 52, 49, 43, 2, 52, 50, 44, 2, 52, 51, 45, 2, 52, 52, 46, 2, 52, 53, 47, 2, 52, 54, 48, 2, 52, 55, 49, 2, 52, 56, 50, 2, 52, 57, 51, 2, 53, 48]))
+ payload.position = 1
+ let message = try MoQTStreamHeaderSubgroup(&payload)
+ #expect(message.trackAlias == 99)
+ #expect(message.groupId == 0)
+ var objects: [MoQTObject] = .init()
+ while 0 < payload.bytesAvailable {
+ objects.append(try MoQTObject(&payload))
+ }
+ #expect(objects.last?.id == 51)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/Package.resolved b/Vendor/HaishinKit.swift/Package.resolved
new file mode 100644
index 000000000..4ed8b61e4
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Package.resolved
@@ -0,0 +1,33 @@
+{
+ "originHash" : "0e215ed38a2c303f72c8fca29cc88c3ca1dfd9563d350725303c79aad85ae387",
+ "pins" : [
+ {
+ "identity" : "logboard",
+ "kind" : "remoteSourceControl",
+ "location" : "https://github.com/shogo4405/Logboard.git",
+ "state" : {
+ "revision" : "8f41c63afb903040b77049ee2efa8c257b8c0d50",
+ "version" : "2.6.0"
+ }
+ },
+ {
+ "identity" : "swift-docc-plugin",
+ "kind" : "remoteSourceControl",
+ "location" : "https://github.com/swiftlang/swift-docc-plugin",
+ "state" : {
+ "revision" : "3e4f133a77e644a5812911a0513aeb7288b07d06",
+ "version" : "1.4.5"
+ }
+ },
+ {
+ "identity" : "swift-docc-symbolkit",
+ "kind" : "remoteSourceControl",
+ "location" : "https://github.com/swiftlang/swift-docc-symbolkit",
+ "state" : {
+ "revision" : "b45d1f2ed151d057b54504d653e0da5552844e34",
+ "version" : "1.0.0"
+ }
+ }
+ ],
+ "version" : 3
+}
diff --git a/Vendor/HaishinKit.swift/Package.swift b/Vendor/HaishinKit.swift/Package.swift
new file mode 100644
index 000000000..fb08bcb73
--- /dev/null
+++ b/Vendor/HaishinKit.swift/Package.swift
@@ -0,0 +1,106 @@
+// swift-tools-version:6.0
+// The swift-tools-version declares the minimum version of Swift required to build this package.
+import PackageDescription
+
+#if swift(<6)
+let swiftSettings: [SwiftSetting] = [
+ .enableExperimentalFeature("ExistentialAny"),
+ .enableExperimentalFeature("StrictConcurrency")
+]
+#else
+let swiftSettings: [SwiftSetting] = [
+ .enableUpcomingFeature("ExistentialAny")
+]
+#endif
+
+let package = Package(
+ name: "HaishinKit",
+ platforms: [
+ .iOS(.v15),
+ .tvOS(.v15),
+ .macCatalyst(.v15),
+ .macOS(.v12),
+ .visionOS(.v1)
+ ],
+ products: [
+ .library(name: "HaishinKit", targets: ["HaishinKit"]),
+ .library(name: "RTMPHaishinKit", targets: ["RTMPHaishinKit"]),
+ .library(name: "SRTHaishinKit", targets: ["SRTHaishinKit"]),
+ .library(name: "MoQTHaishinKit", targets: ["MoQTHaishinKit"]),
+ .library(name: "RTCHaishinKit", targets: ["RTCHaishinKit"])
+ ],
+ dependencies: [
+ .package(url: "https://github.com/swiftlang/swift-docc-plugin", from: "1.4.5"),
+ .package(url: "https://github.com/shogo4405/Logboard.git", "2.6.0"..<"2.7.0")
+ ],
+ targets: [
+ .binaryTarget(
+ name: "libsrt",
+ url: "https://github.com/HaishinKit/libsrt-xcframework/releases/download/v1.5.4/libsrt.xcframework.zip",
+ checksum: "76879e2802e45ce043f52871a0a6764d57f833bdb729f2ba6663f4e31d658c4a"
+ ),
+ .binaryTarget(
+ name: "libdatachannel",
+ url: "https://github.com/HaishinKit/libdatachannel-xcframework/releases/download/v0.24.0/libdatachannel.xcframework.zip",
+ checksum: "52163eed2c9d652d913b20d1fd5a1925c5982b1dcdf335fd916c72ffa385bb26"
+ ),
+ .target(
+ name: "HaishinKit",
+ dependencies: ["Logboard"],
+ path: "HaishinKit/Sources",
+ swiftSettings: swiftSettings
+ ),
+ .target(
+ name: "RTMPHaishinKit",
+ dependencies: ["HaishinKit"],
+ path: "RTMPHaishinKit/Sources",
+ swiftSettings: swiftSettings
+ ),
+ .target(
+ name: "SRTHaishinKit",
+ dependencies: ["libsrt", "HaishinKit"],
+ path: "SRTHaishinKit/Sources",
+ swiftSettings: swiftSettings
+ ),
+ .target(
+ name: "MoQTHaishinKit",
+ dependencies: ["HaishinKit"],
+ path: "MoQTHaishinKit/Sources",
+ swiftSettings: swiftSettings
+ ),
+ .target(
+ name: "RTCHaishinKit",
+ dependencies: ["libdatachannel", "HaishinKit"],
+ path: "RTCHaishinKit/Sources",
+ swiftSettings: swiftSettings
+ ),
+ .testTarget(
+ name: "HaishinKitTests",
+ dependencies: ["HaishinKit"],
+ path: "HaishinKit/Tests",
+ resources: [
+ .process("Asset")
+ ],
+ swiftSettings: swiftSettings
+ ),
+ .testTarget(
+ name: "RTMPHaishinKitTests",
+ dependencies: ["RTMPHaishinKit"],
+ path: "RTMPHaishinKit/Tests",
+ swiftSettings: swiftSettings
+ ),
+ .testTarget(
+ name: "SRTHaishinKitTests",
+ dependencies: ["SRTHaishinKit"],
+ path: "SRTHaishinKit/Tests",
+ swiftSettings: swiftSettings
+ ),
+ .testTarget(
+ name: "RTCHaishinKitTests",
+ dependencies: ["RTCHaishinKit"],
+ path: "RTCHaishinKit/Tests",
+ swiftSettings: swiftSettings
+ )
+ ],
+ swiftLanguageModes: [.v6, .v5]
+)
diff --git a/Vendor/HaishinKit.swift/README.md b/Vendor/HaishinKit.swift/README.md
new file mode 100644
index 000000000..98c2a47b3
--- /dev/null
+++ b/Vendor/HaishinKit.swift/README.md
@@ -0,0 +1,107 @@
+# HaishinKit for iOS, macOS, tvOS, visionOS and [Android](https://github.com/HaishinKit/HaishinKit.kt).
+[Stars](https://github.com/HaishinKit/HaishinKit.swift/stargazers)
+[Latest release](https://github.com/HaishinKit/HaishinKit.swift/releases/latest)
+[Swift Package Index](https://swiftpackageindex.com/HaishinKit/HaishinKit.swift)
+[Supported platforms](https://swiftpackageindex.com/HaishinKit/HaishinKit.swift)
+[License](https://raw.githubusercontent.com/HaishinKit/HaishinKit.swift/master/LICENSE.md)
+[GitHub Sponsors](https://github.com/sponsors/shogo4405)
+
+* Camera and Microphone streaming library via RTMP and SRT for iOS, macOS, tvOS and visionOS.
+* 10th Anniversary🎖️In development for 10 years, with 2,778 commits and 163 releases. Thank you. Since Aug 2, 2015.
+
+## 💖 Sponsors
+Do you need additional support? Technical support on Issues and Discussions is provided only to contributors and academic researchers of HaishinKit. By becoming a sponsor, I can provide the support you need.
+
+Sponsor: [$50 per month](https://github.com/sponsors/shogo4405): Technical support via GitHub Issues/Discussions with priority response.
+
+## 🎨 Features
+- **Protocols** ✨Publish and playback features are available for [RTMP](RTMPHaishinKit/Sources/Docs.docc/index.md), [SRT](SRTHaishinKit/Sources/Docs.docc/index.md) and [WHEP/WHIP (alpha)](RTCHaishinKit/Sources/Docs.docc/index.md).
+- **Multi Camera access** ✨[Support multitasking camera access.](https://developer.apple.com/documentation/avkit/accessing-the-camera-while-multitasking-on-ipad)
+- **Multi Streaming** ✨Allowing live streaming to separate services. Views also support this, enabling the verification of raw video data.
+- **Strict Concurrency** ✨Supports Swift's Strict Concurrency compliance.
+- **Screen Capture** ✨Supports the ReplayKit (iOS) and ScreenCaptureKit (macOS) APIs.
+- **Video mixing** ✨Possible to display any text or bitmap on a video during broadcasting or viewing. This allows for various applications such as watermarking and time display.
+ |Publish|Playback|
+ |:---:|:---:|
+  |(screenshot)|(screenshot)|
+
+## 🌏 Requirements
+
+### Development
+|Version|Xcode|Swift|
+|:----:|:----:|:----:|
+|2.2.0+|26.0+|6.0+|
+|2.1.0+|16.4+|6.0+|
+
+### OS
+|iOS|tvOS|Mac Catalyst|macOS|visionOS|watchOS|
+|:-:|:-:|:-:|:-:|:-:|:-:|
+|15.0+|15.0+|15.0+|12.0+|1.0+|-|
+
+- SRTHaishinKit is not available for Mac Catalyst.
+
+## 📖 Getting Started
+
+> [!IMPORTANT]
+> There are several issues that occur when connected to Xcode. Please also refer to [this document](https://github.com/HaishinKit/HaishinKit.swift/blob/main/HaishinKit/Sources/Docs.docc/known-issue.md).
+
+### 🔧 Examples
+- Reference implementation app for live streaming `publish` and `playback`.
+- If an issue occurs, please check whether it also happens in the examples app.
+
+#### Usage
+
+You can verify by changing the URL of the following file.
+https://github.com/HaishinKit/HaishinKit.swift/blob/abf1883d25d0ba29e1d1d67ea9e3a3b5be61a196/Examples/Preference.swift#L1-L7
+
+#### Download
+```sh
+git clone https://github.com/HaishinKit/HaishinKit.swift.git
+cd HaishinKit.swift
+open Examples/Examples.xcodeproj
+```
+
+### 🔧 Installation
+#### Using Swift Package Manager
+```sh
+https://github.com/HaishinKit/HaishinKit.swift
+```
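+
+For example, in a `Package.swift` manifest (the version shown is illustrative):
+```swift
+dependencies: [
+    .package(url: "https://github.com/HaishinKit/HaishinKit.swift.git", from: "2.2.0")
+]
+```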
+
+### 🔧 Prerequisites
+
+#### AVAudioSession
+Make sure you set up and activate your AVAudioSession on iOS.
+
+```swift
+import AVFoundation
+
+let session = AVAudioSession.sharedInstance()
+do {
+ try session.setCategory(.playAndRecord, mode: .default, options: [.defaultToSpeaker, .allowBluetooth])
+ try session.setActive(true)
+} catch {
+ print(error)
+}
+```
+
+### 🔧 Cocoa Keys
+Please make sure your `Info.plist` contains the following keys when accessing the camera or microphone.
+```xml
+<key>NSCameraUsageDescription</key>
+<string>your usage description here</string>
+<key>NSMicrophoneUsageDescription</key>
+<string>your usage description here</string>
+```
+
+## 📃 Documentation
+- [API Documentation](https://docs.haishinkit.com/swift/latest/documentation/)
+- [Migration Guide](https://github.com/HaishinKit/HaishinKit.swift/wiki#-migration-guide)
+
+## 🌏 Related projects
+Project name |Notes |License
+----------------|------------|--------------
+[HaishinKit for Android.](https://github.com/HaishinKit/HaishinKit.kt)|Camera and Microphone streaming library via RTMP for Android.|[BSD 3-Clause "New" or "Revised" License](https://github.com/HaishinKit/HaishinKit.kt/blob/master/LICENSE.md)
+[HaishinKit for Flutter.](https://github.com/HaishinKit/HaishinKit.dart)|Camera and Microphone streaming library via RTMP for Flutter.|[BSD 3-Clause "New" or "Revised" License](https://github.com/HaishinKit/HaishinKit.dart/blob/master/LICENSE.md)
+
+## 📜 License
+BSD-3-Clause
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Docs.docc/index.md b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Docs.docc/index.md
new file mode 100644
index 000000000..cf3245970
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Docs.docc/index.md
@@ -0,0 +1,27 @@
+# ``RTCHaishinKit``
+This module supports WHIP/WHEP protocols.
+
+## 🔍 Overview
+RTCHaishinKit is a WHIP/WHEP protocol stack in Swift. It internally uses a library built from [libdatachannel](https://github.com/paullouisageneau/libdatachannel) and converted into an xcframework.
+
+## 🎨 Features
+- Publish(WHIP)
+ - H264 and OPUS support.
+- Playback(WHEP)
+ - H264 and OPUS support.
+
+## 📓 Usage
+### Logging
+- A Swift wrapper method around `rtcInitLogger` is provided:
+```swift
+await RTCLogger.shared.setLevel(.debug)
+```
+
+### Session
+Currently designed to work with the Session API.
+```swift
+import RTCHaishinKit
+
+await SessionBuilderFactory.shared.register(HTTPSessionFactory())
+```
+
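+### Configuration
+ICE servers for the underlying peer connection can be supplied through `HTTPSessionConfiguration`. A minimal sketch; the STUN URL is only an example:
+```swift
+var configuration = HTTPSessionConfiguration()
+configuration.iceServers = ["stun:stun.l.google.com:19302"]
+```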
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Extension/Array+Extension.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Extension/Array+Extension.swift
new file mode 100644
index 000000000..8997b9e37
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Extension/Array+Extension.swift
@@ -0,0 +1,32 @@
+import Foundation
+
+extension Array where Element == String {
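+    // Nests `withCString` calls recursively so that every C string pointer
+    // stays valid for the whole duration of `body`.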
+    func withCStrings<R>(_ body: ([UnsafePointer<CChar>]) -> R) -> R {
+        var cStringPtrs: [UnsafePointer<CChar>] = []
+        cStringPtrs.reserveCapacity(count)
+        func loop(_ i: Int, _ current: [UnsafePointer<CChar>], _ body: ([UnsafePointer<CChar>]) -> R) -> R {
+ if i == count {
+ return body(current)
+ }
+ return self[i].withCString { cstr in
+ var next = current
+ next.append(cstr)
+ return loop(i + 1, next, body)
+ }
+ }
+ return loop(0, [], body)
+ }
+
+    func withCStringArray<R>(_ body: (UnsafeMutablePointer<UnsafePointer<CChar>?>) -> R) -> R {
+ let cStrings = self.map { $0.utf8CString }
+        let pointerArray = UnsafeMutablePointer<UnsafePointer<CChar>?>.allocate(capacity: cStrings.count)
+ for (i, cString) in cStrings.enumerated() {
+ cString.withUnsafeBufferPointer { buf in
+ pointerArray[i] = buf.baseAddress
+ }
+ }
+ let result = body(pointerArray)
+ pointerArray.deallocate()
+ return result
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Extension/AudioCodecSettings.Format+Extension.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Extension/AudioCodecSettings.Format+Extension.swift
new file mode 100644
index 000000000..717ebb412
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Extension/AudioCodecSettings.Format+Extension.swift
@@ -0,0 +1,15 @@
+import HaishinKit
+import libdatachannel
+
+extension AudioCodecSettings.Format {
+ var cValue: rtcCodec? {
+ switch self {
+ case .opus:
+ return RTC_CODEC_OPUS
+ case .aac:
+ return RTC_CODEC_AAC
+ case .pcm:
+ return nil
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Extension/VideoCodecSettings.Format+Extension.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Extension/VideoCodecSettings.Format+Extension.swift
new file mode 100644
index 000000000..87b2194e7
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Extension/VideoCodecSettings.Format+Extension.swift
@@ -0,0 +1,13 @@
+import HaishinKit
+import libdatachannel
+
+extension VideoCodecSettings.Format {
+ var cValue: rtcCodec {
+ switch self {
+ case .h264:
+ return RTC_CODEC_H264
+ case .hevc:
+ return RTC_CODEC_H265
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/HTTP/HTTPSession.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/HTTP/HTTPSession.swift
new file mode 100644
index 000000000..dd2dabc7e
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/HTTP/HTTPSession.swift
@@ -0,0 +1,142 @@
+import Foundation
+import HaishinKit
+
+actor HTTPSession: Session {
+ var connected: Bool {
+ get async {
+ peerConnection?.connectionState == .connected
+ }
+ }
+
+ @AsyncStreamed(.closed)
+    private(set) var readyState: AsyncStream<SessionReadyState>
+
+ var stream: any StreamConvertible {
+ _stream
+ }
+
+ private let uri: URL
+ private var location: URL?
+ private var maxRetryCount: Int = 0
+ private var _stream = RTCStream()
+ private var mode: SessionMode
+ private var configuration: HTTPSessionConfiguration?
+ private var peerConnection: RTCPeerConnection?
+
+ init(uri: URL, mode: SessionMode, configuration: (any SessionConfiguration)?) {
+ logger.level = .debug
+ self.uri = uri
+ self.mode = mode
+ if let configuration = configuration as? HTTPSessionConfiguration {
+ self.configuration = configuration
+ }
+ }
+
+ func setMaxRetryCount(_ maxRetryCount: Int) {
+ self.maxRetryCount = maxRetryCount
+ }
+
+ func connect(_ disconnected: @Sendable @escaping () -> Void) async throws {
+ guard _readyState.value == .closed else {
+ return
+ }
+ _readyState.value = .connecting
+ let peerConnection = try makePeerConnection()
+ switch mode {
+ case .publish:
+ let audioSettings = await _stream.audioSettings
+ try peerConnection.addTrack(AudioStreamTrack(audioSettings), stream: _stream)
+ let videoSettings = await _stream.videoSettings
+ try peerConnection.addTrack(VideoStreamTrack(videoSettings), stream: _stream)
+ case .playback:
+ await _stream.setDirection(.recvonly)
+ try peerConnection.addTransceiver(.audio, stream: _stream)
+ try peerConnection.addTransceiver(.video, stream: _stream)
+ }
+ do {
+ self.peerConnection = peerConnection
+ try peerConnection.setLocalDesciption(.offer)
+ let answer = try await requestOffer(uri, offer: peerConnection.createOffer())
+ try peerConnection.setRemoteDesciption(answer, type: .answer)
+ _readyState.value = .open
+ } catch {
+ logger.warn(error)
+ await _stream.close()
+ peerConnection.close()
+ _readyState.value = .closed
+ throw error
+ }
+ }
+
+ func close() async throws {
+ guard let location, _readyState.value == .open else {
+ return
+ }
+ _readyState.value = .closing
+ var request = URLRequest(url: location)
+ request.httpMethod = "DELETE"
+ request.addValue("application/sdp", forHTTPHeaderField: "Content-Type")
+ _ = try await URLSession.shared.data(for: request)
+ await _stream.close()
+ peerConnection?.close()
+ self.location = nil
+ _readyState.value = .closed
+ }
+
+ private func requestOffer(_ url: URL, offer: String) async throws -> String {
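+        // WHIP/WHEP handshake: POST the SDP offer as application/sdp; the
+        // response body carries the SDP answer, and the Location header names
+        // the resource that close() later DELETEs.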
+ logger.debug(offer)
+ var request = URLRequest(url: url)
+ request.httpMethod = "POST"
+ request.addValue("application/sdp", forHTTPHeaderField: "Content-Type")
+ request.httpBody = offer.data(using: .utf8)
+ let (data, response) = try await URLSession.shared.data(for: request)
+ if let response = response as? HTTPURLResponse {
+ if let location = response.allHeaderFields["Location"] as? String {
+            if location.hasPrefix("http") {
+ self.location = URL(string: location)
+ } else {
+ var baseURL = "\(url.scheme ?? "http")://\(url.host ?? "")"
+ if let port = url.port {
+ baseURL += ":\(port)"
+ }
+ self.location = URL(string: "\(baseURL)\(location)")
+ }
+ }
+ }
+ return String(data: data, encoding: .utf8) ?? ""
+ }
+
+ private func makePeerConnection() throws -> RTCPeerConnection {
+        let connection = try RTCPeerConnection(configuration)
+        connection.delegate = self
+        return connection
+ }
+}
+
+extension HTTPSession: RTCPeerConnectionDelegate {
+ // MARK: RTCPeerConnectionDelegate
+ nonisolated func peerConnection(_ peerConnection: RTCPeerConnection, connectionStateChanged state: RTCPeerConnection.ConnectionState) {
+ Task {
+ if state == .connected {
+ if await mode == .publish {
+ await _stream.setDirection(.sendonly)
+ }
+ }
+ }
+ }
+
+ nonisolated func peerConnection(_ peerConnection: RTCPeerConnection, signalingStateChanged signalingState: RTCPeerConnection.SignalingState) {
+ }
+
+ nonisolated func peerConnection(_ peerConnection: RTCPeerConnection, iceConnectionStateChanged iceConnectionState: RTCPeerConnection.IceConnectionState) {
+ }
+
+ nonisolated func peerConnection(_ peerConnection: RTCPeerConnection, iceGatheringStateChanged gatheringState: RTCPeerConnection.IceGatheringState) {
+ }
+
+ nonisolated func peerConnection(_ peerConnection: RTCPeerConnection, gotIceCandidate candidated: RTCIceCandidate) {
+ }
+
+    nonisolated func peerConnection(_ peerConnection: RTCPeerConnection, didOpen dataChannel: RTCDataChannel) {
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/HTTP/HTTPSessionConfiguration.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/HTTP/HTTPSessionConfiguration.swift
new file mode 100644
index 000000000..20ee716c7
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/HTTP/HTTPSessionConfiguration.swift
@@ -0,0 +1,20 @@
+import HaishinKit
+
+/// A configuration object that defines options for an HTTPSession.
+///
+/// The properties of this structure are internally converted into
+/// an `RTCConfiguration` and applied when creating the underlying
+/// `RTCPeerConnection`.
+///
+public struct HTTPSessionConfiguration: SessionConfiguration, RTCConfigurationConvertible {
+ public var iceServers: [String] = []
+ public var bindAddress: String?
+ public var certificateType: RTCCertificateType?
+ public var iceTransportPolicy: RTCTransportPolicy?
+ public var isIceUdpMuxEnabled: Bool = false
+ public var isAutoNegotionEnabled: Bool = true
+ public var isForceMediaTransport: Bool = false
+    public var portRange: Range<UInt16>?
+ public var mtu: Int32?
+ public var maxMesasgeSize: Int32?
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/HTTP/HTTPSessionFactory.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/HTTP/HTTPSessionFactory.swift
new file mode 100644
index 000000000..02e21e8d7
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/HTTP/HTTPSessionFactory.swift
@@ -0,0 +1,13 @@
+import Foundation
+import HaishinKit
+
+public struct HTTPSessionFactory: SessionFactory {
+    public let supportedProtocols: Set<String> = ["http", "https"]
+
+ public init() {
+ }
+
+ public func make(_ uri: URL, mode: SessionMode, configuration: (any SessionConfiguration)?) -> any Session {
+ return HTTPSession(uri: uri, mode: mode, configuration: configuration)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCCertificateType.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCCertificateType.swift
new file mode 100644
index 000000000..67da5c56f
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCCertificateType.swift
@@ -0,0 +1,20 @@
+import libdatachannel
+
+public enum RTCCertificateType: Sendable, Encodable {
+ case `default`
+ case ECDSA
+ case RSA
+}
+
+extension RTCCertificateType {
+ var cValue: rtcCertificateType {
+ switch self {
+ case .default:
+ return RTC_CERTIFICATE_DEFAULT
+ case .ECDSA:
+ return RTC_CERTIFICATE_ECDSA
+ case .RSA:
+ return RTC_CERTIFICATE_RSA
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCChannel.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCChannel.swift
new file mode 100644
index 000000000..fb8edf765
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCChannel.swift
@@ -0,0 +1,17 @@
+import Foundation
+import HaishinKit
+import libdatachannel
+
+protocol RTCChannel {
+ var id: Int32 { get }
+
+ func send(_ message: Data) throws
+}
+
+extension RTCChannel {
+ public func send(_ message: Data) throws {
+ try RTCError.check(message.withUnsafeBytes { pointer in
+ return rtcSendMessage(id, pointer.bindMemory(to: CChar.self).baseAddress, Int32(message.count))
+ })
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCConfiguration.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCConfiguration.swift
new file mode 100644
index 000000000..dab56faa9
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCConfiguration.swift
@@ -0,0 +1,101 @@
+import Foundation
+import libdatachannel
+
+public protocol RTCConfigurationConvertible: Sendable {
+ /// A list of ICE server URLs used to establish the connection.
+ var iceServers: [String] { get }
+ /// The local IP address to bind sockets to.
+ var bindAddress: String? { get }
+ /// The type of certificate to generate for DTLS handshakes.
+ var certificateType: RTCCertificateType? { get }
+ /// The ICE transport policy that controls how candidates are gathered.
+ var iceTransportPolicy: RTCTransportPolicy? { get }
+ /// A Boolean value that indicates whether ICE UDP multiplexing is enabled.
+ var isIceUdpMuxEnabled: Bool { get }
+ /// A Boolean value that indicates whether negotiation is performed automatically.
+ var isAutoNegotionEnabled: Bool { get }
+ /// A Boolean value that forces the use of media transport even for data sessions.
+ var isForceMediaTransport: Bool { get }
+ /// The port range available for allocating ICE candidates.
+    var portRange: Range<UInt16>? { get }
+ /// The maximum transmission unit (MTU) for outgoing packets.
+ var mtu: Int32? { get }
+ /// The maximum message size allowed for data channels.
+ var maxMesasgeSize: Int32? { get }
+}
+
+extension RTCConfigurationConvertible {
+ func createPeerConnection() -> Int32 {
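+        // Bridges the Swift configuration into libdatachannel's C
+        // rtcConfiguration; the nested helpers keep the C strings alive until
+        // rtcCreatePeerConnection returns.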
+ return iceServers.withCStringArray { cIceServers in
+ return [bindAddress ?? ""].withCStrings { cStrings in
+ var config = rtcConfiguration()
+ if !iceServers.isEmpty {
+ config.iceServers = cIceServers
+ config.iceServersCount = Int32(iceServers.count)
+ }
+ if bindAddress != nil {
+ config.bindAddress = cStrings[0]
+ }
+ if let certificateType {
+ config.certificateType = certificateType.cValue
+ }
+ if let iceTransportPolicy {
+ config.iceTransportPolicy = iceTransportPolicy.cValue
+ }
+ config.enableIceUdpMux = isIceUdpMuxEnabled
+ config.disableAutoNegotiation = !isAutoNegotionEnabled
+ config.forceMediaTransport = isForceMediaTransport
+ if let portRange {
+ config.portRangeBegin = portRange.lowerBound
+ config.portRangeEnd = portRange.upperBound
+ }
+ if let mtu {
+ config.mtu = mtu
+ }
+ if let maxMesasgeSize {
+ config.maxMessageSize = maxMesasgeSize
+ }
+ return rtcCreatePeerConnection(&config)
+ }
+ }
+ }
+}
+
+public struct RTCConfiguration: RTCConfigurationConvertible {
+ static let empty = RTCConfiguration()
+
+ public let iceServers: [String]
+ public let bindAddress: String?
+ public let certificateType: RTCCertificateType?
+ public let iceTransportPolicy: RTCTransportPolicy?
+ public let isIceUdpMuxEnabled: Bool
+ public let isAutoNegotionEnabled: Bool
+ public let isForceMediaTransport: Bool
+    public let portRange: Range<UInt16>?
+ public let mtu: Int32?
+ public let maxMesasgeSize: Int32?
+
+ public init(
+ iceServers: [String] = [],
+ bindAddress: String? = nil,
+ certificateType: RTCCertificateType? = nil,
+ iceTransportPolicy: RTCTransportPolicy? = nil,
+ isIceUdpMuxEnabled: Bool = false,
+ isAutoNegotionEnabled: Bool = true,
+ isForceMediaTransport: Bool = false,
+        portRange: Range<UInt16>? = nil,
+ mtu: Int32? = nil,
+ maxMesasgeSize: Int32? = nil
+ ) {
+ self.iceServers = iceServers
+ self.bindAddress = bindAddress
+ self.certificateType = certificateType
+ self.iceTransportPolicy = iceTransportPolicy
+ self.isIceUdpMuxEnabled = isIceUdpMuxEnabled
+ self.isAutoNegotionEnabled = isAutoNegotionEnabled
+ self.isForceMediaTransport = isForceMediaTransport
+ self.portRange = portRange
+ self.mtu = mtu
+ self.maxMesasgeSize = maxMesasgeSize
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCDataChannel.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCDataChannel.swift
new file mode 100644
index 000000000..9d572db2b
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCDataChannel.swift
@@ -0,0 +1,121 @@
+import Foundation
+import libdatachannel
+
+/// Delegate for receiving RTCDataChannel events.
+public protocol RTCDataChannelDelegate: AnyObject {
+ /// Called when the readyState of the data channel changes.
+ /// - Parameters:
+ /// - dataChannel: The RTCDataChannel instance.
+ /// - readyState: The updated readyState.
+ func dataChannel(_ dataChannel: RTCDataChannel, readyStateChanged readyState: RTCDataChannel.ReadyState)
+
+ /// Called when a binary message is received.
+ /// - Parameters:
+ /// - dataChannel: The RTCDataChannel instance.
+ /// - message: The received binary data.
+ func dataChannel(_ dataChannel: RTCDataChannel, didReceiveMessage message: Data)
+
+ /// Called when a text message is received.
+ /// - Parameters:
+ /// - dataChannel: The RTCDataChannel instance.
+ /// - message: The received text message.
+ func dataChannel(_ dataChannel: RTCDataChannel, didReceiveMessage message: String)
+}
+
+public final class RTCDataChannel: RTCChannel {
+ /// Represents the ready state of an RTCDataChannel.
+ public enum ReadyState {
+ /// The data channel is being created and the connection is in progress.
+ case connecting
+ /// The data channel is fully established and ready to send and receive messages.
+ case open
+ /// The data channel is in the process of closing.
+ case closing
+ /// The data channel has been closed and can no longer be used.
+ case closed
+ }
+
+ public weak var delegate: (any RTCDataChannelDelegate)?
+
+ /// The label.
+ public var label: String {
+ do {
+ return try CUtil.getString { buffer, size in
+ rtcGetDataChannelLabel(id, buffer, size)
+ }
+ } catch {
+ logger.warn(error)
+ return ""
+ }
+ }
+
+ /// The stream id.
+ public var stream: Int {
+ Int(rtcGetDataChannelStream(id))
+ }
+
+ public private(set) var readyState: ReadyState = .connecting {
+ didSet {
+ delegate?.dataChannel(self, readyStateChanged: readyState)
+ }
+ }
+
+ let id: Int32
+
+ init(id: Int32) throws {
+ self.id = id
+ try RTCError.check(id)
+ do {
+ try RTCError.check(rtcSetOpenCallback(id) { _, pointer in
+ guard let pointer else { return }
+ Unmanaged<RTCDataChannel>.fromOpaque(pointer).takeUnretainedValue().readyState = .open
+ })
+ try RTCError.check(rtcSetClosedCallback(id) { _, pointer in
+ guard let pointer else { return }
+ Unmanaged<RTCDataChannel>.fromOpaque(pointer).takeUnretainedValue().readyState = .closed
+ })
+ try RTCError.check(rtcSetMessageCallback(id) { _, bytes, size, pointer in
+ guard let bytes, let pointer else { return }
+ if 0 <= size {
+ let data = Data(bytes: bytes, count: Int(size))
+ Unmanaged<RTCDataChannel>.fromOpaque(pointer).takeUnretainedValue().didReceiveMessage(data)
+ } else {
+ Unmanaged<RTCDataChannel>.fromOpaque(pointer).takeUnretainedValue().didReceiveMessage(String(cString: bytes))
+ }
+ })
+ try RTCError.check(rtcSetErrorCallback(id) { _, error, pointer in
+ guard let error, let pointer else { return }
+ Unmanaged<RTCDataChannel>.fromOpaque(pointer).takeUnretainedValue().errorOccurred(String(cString: error))
+ })
+ rtcSetUserPointer(id, Unmanaged.passUnretained(self).toOpaque())
+ } catch {
+ rtcDeleteDataChannel(id)
+ throw error
+ }
+ }
+
+ deinit {
+ rtcDeleteDataChannel(id)
+ }
+
+ public func send(_ message: String) throws {
+ // A negative size tells libdatachannel to treat the payload as a
+ // null-terminated text message; withCString guarantees the terminator.
+ try RTCError.check(message.withCString { cString in
+ rtcSendMessage(id, cString, -1)
+ })
+ }
+
+ private func errorOccurred(_ error: String) {
+ logger.warn(error)
+ }
+
+ private func didReceiveMessage(_ message: Data) {
+ delegate?.dataChannel(self, didReceiveMessage: message)
+ }
+
+ private func didReceiveMessage(_ message: String) {
+ delegate?.dataChannel(self, didReceiveMessage: message)
+ }
+}
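+
+ // Sketch of a delegate wired to an open channel. `Echo` is a hypothetical
+ // conforming type, not part of this API:
+ //
+ // final class Echo: RTCDataChannelDelegate {
+ //     func dataChannel(_ dataChannel: RTCDataChannel, readyStateChanged readyState: RTCDataChannel.ReadyState) {
+ //         guard readyState == .open else { return }
+ //         try? dataChannel.send("hello")
+ //     }
+ //     func dataChannel(_ dataChannel: RTCDataChannel, didReceiveMessage message: Data) {}
+ //     func dataChannel(_ dataChannel: RTCDataChannel, didReceiveMessage message: String) {}
+ // }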
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCDirection.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCDirection.swift
new file mode 100644
index 000000000..6e591be77
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCDirection.swift
@@ -0,0 +1,24 @@
+import libdatachannel
+
+public enum RTCDirection: Sendable {
+ case unknown
+ case sendrecv
+ case sendonly
+ case recvonly
+ case inactive
+
+ var cValue: rtcDirection {
+ switch self {
+ case .unknown:
+ return RTC_DIRECTION_UNKNOWN
+ case .sendrecv:
+ return RTC_DIRECTION_SENDRECV
+ case .sendonly:
+ return RTC_DIRECTION_SENDONLY
+ case .recvonly:
+ return RTC_DIRECTION_RECVONLY
+ case .inactive:
+ return RTC_DIRECTION_INACTIVE
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCError.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCError.swift
new file mode 100644
index 000000000..52ffe6a7a
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCError.swift
@@ -0,0 +1,47 @@
+public enum RTCError: RawRepresentable, Swift.Error {
+ @discardableResult
+ static func check(_ result: Int32) throws -> Int32 {
+ if result < 0 {
+ throw RTCError(rawValue: result)
+ }
+ return result
+ }
+
+ public typealias RawValue = Int32
+
+ case invalid
+ case failure
+ case notAvail
+ case tooSmall
+ case undefined(value: Int32)
+
+ public var rawValue: Int32 {
+ switch self {
+ case .invalid:
+ return -1
+ case .failure:
+ return -2
+ case .notAvail:
+ return -3
+ case .tooSmall:
+ return -4
+ case .undefined(let value):
+ return value
+ }
+ }
+
+ public init(rawValue: Int32) {
+ switch rawValue {
+ case -1:
+ self = .invalid
+ case -2:
+ self = .failure
+ case -3:
+ self = .notAvail
+ case -4:
+ self = .tooSmall
+ default:
+ self = .undefined(value: rawValue)
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCIceCandidate.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCIceCandidate.swift
new file mode 100644
index 000000000..54b3b9604
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCIceCandidate.swift
@@ -0,0 +1,21 @@
+import Foundation
+
+public struct RTCIceCandidate: Sendable {
+ public let candidate: String
+ public let mid: String
+}
+
+extension RTCIceCandidate {
+ init(candidate: UnsafePointer<CChar>?, mid: UnsafePointer<CChar>?) {
+ if let candidate {
+ self.candidate = String(cString: candidate)
+ } else {
+ self.candidate = ""
+ }
+ if let mid {
+ self.mid = String(cString: mid)
+ } else {
+ self.mid = ""
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCLogger.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCLogger.swift
new file mode 100644
index 000000000..d4236c903
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCLogger.swift
@@ -0,0 +1,52 @@
+import libdatachannel
+
+/// An actor that controls the logging level of the `libdatachannel` logging system.
+public actor RTCLogger {
+ /// Defines the logging severity levels supported by `libdatachannel`.
+ public enum Level {
+ /// No logs will be emitted.
+ case none
+ /// Fatal errors.
+ case fatal
+ /// Recoverable errors.
+ case error
+ /// Potential issues that should be noted.
+ case warning
+ /// General informational messages.
+ case info
+ /// Debug messages for development and troubleshooting.
+ case debug
+ /// Verbose messages for detailed tracing.
+ case verbose
+
+ var cValue: rtcLogLevel {
+ switch self {
+ case .none:
+ return RTC_LOG_NONE
+ case .fatal:
+ return RTC_LOG_FATAL
+ case .error:
+ return RTC_LOG_ERROR
+ case .warning:
+ return RTC_LOG_WARNING
+ case .info:
+ return RTC_LOG_INFO
+ case .debug:
+ return RTC_LOG_DEBUG
+ case .verbose:
+ return RTC_LOG_VERBOSE
+ }
+ }
+ }
+
+ /// The singleton logger instance.
+ public static let shared = RTCLogger()
+
+ /// The current logging level.
+ public private(set) var level: Level = .none
+
+ /// Sets the current logging level.
+ public func setLevel(_ level: Level) {
+ self.level = level
+ rtcInitLogger(level.cValue, nil)
+ }
+}
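+
+ // Enabling verbose libdatachannel logs during development might look like:
+ //
+ // Task {
+ //     await RTCLogger.shared.setLevel(.debug)
+ // }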
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCPeerConnection.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCPeerConnection.swift
new file mode 100644
index 000000000..158704e83
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCPeerConnection.swift
@@ -0,0 +1,467 @@
+import AVFAudio
+import CoreMedia
+import Foundation
+import libdatachannel
+
+public protocol RTCPeerConnectionDelegate: AnyObject {
+ func peerConnection(_ peerConnection: RTCPeerConnection, connectionStateChanged connectionState: RTCPeerConnection.ConnectionState)
+ func peerConnection(_ peerConnection: RTCPeerConnection, iceGatheringStateChanged iceGatheringState: RTCPeerConnection.IceGatheringState)
+ func peerConnection(_ peerConnection: RTCPeerConnection, iceConnectionStateChanged iceConnectionState: RTCPeerConnection.IceConnectionState)
+ func peerConnection(_ peerConnection: RTCPeerConnection, signalingStateChanged signalingState: RTCPeerConnection.SignalingState)
+ func peerConnection(_ peerConnection: RTCPeerConnection, didOpen dataChannel: RTCDataChannel)
+ func peerConnection(_ peerConnection: RTCPeerConnection, gotIceCandidate candidate: RTCIceCandidate)
+}
+
+public final class RTCPeerConnection {
+ /// Represents the state of a connection.
+ public enum ConnectionState: Sendable {
+ /// The connection has been created, but no connection attempt has started yet.
+ case new
+ /// A connection attempt is currently in progress.
+ case connecting
+ /// The connection has been successfully established.
+ case connected
+ /// The connection was previously established but is now temporarily lost.
+ case disconnected
+ /// The connection has encountered an unrecoverable error.
+ case failed
+ /// The connection has been closed and will not be used again.
+ case closed
+ }
+
+ /// Represents the ICE gathering state of an RTCPeerConnection.
+ public enum IceGatheringState: Sendable {
+ /// ICE gathering has not yet started.
+ case new
+ /// The agent is currently gathering ICE candidates.
+ case inProgress
+ /// ICE gathering has finished. No more candidates will be gathered.
+ case complete
+ }
+
+ /// Represents the state of the ICE connection for an RTCPeerConnection.
+ public enum IceConnectionState: Sendable {
+ /// The ICE agent is newly created and no checks have started yet.
+ case new
+ /// The ICE agent is checking candidate pairs to find a workable connection.
+ case checking
+ /// A usable ICE connection has been established.
+ case connected
+ /// ICE checks have completed successfully, and the connection is fully stable.
+ case completed
+ /// The ICE connection has failed and cannot recover.
+ case failed
+ /// The ICE connection has been lost or interrupted.
+ case disconnected
+ /// The ICE agent has been closed and will not be used again.
+ case closed
+ }
+
+ /// Represents the signaling state of an RTCPeerConnection.
+ public enum SignalingState: Sendable {
+ /// The signaling state is stable; there is no outstanding local or remote offer.
+ case stable
+ /// A local offer has been created and set as the local description.
+ case haveLocalOffer
+ /// A remote offer has been received and set as the remote description.
+ case haveRemoteOffer
+ /// A provisional (pr-answer) has been set as the local description.
+ case haveLocalPRAnswer
+ /// A provisional (pr-answer) has been set as the remote description.
+ case haveRemotePRAnswer
+ }
+
+ static let audioMediaDescription = """
+m=audio 9 UDP/TLS/RTP/SAVPF 111
+a=mid:0
+a=recvonly
+a=rtpmap:111 opus/48000/2
+a=fmtp:111 minptime=10;useinbandfec=1;stereo=1;sprop-stereo=1
+"""
+
+ static let videoMediaDescription = """
+m=video 9 UDP/TLS/RTP/SAVPF 98
+a=mid:1
+a=recvonly
+a=rtpmap:98 H264/90000
+a=rtcp-fb:98 goog-remb
+a=rtcp-fb:98 nack
+a=rtcp-fb:98 nack pli
+a=fmtp:98 level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42e01f
+"""
+
+ static let bufferSize: Int = 1024 * 16
+
+ /// Specifies the delegate of an RTCPeerConnection.
+ public weak var delegate: (any RTCPeerConnectionDelegate)?
+ /// The current state of connection.
+ public private(set) var connectionState: ConnectionState = .new {
+ didSet {
+ guard connectionState != oldValue else {
+ return
+ }
+ delegate?.peerConnection(self, connectionStateChanged: connectionState)
+ }
+ }
+ /// The current state of ice connection.
+ public private(set) var iceConnectionState: IceConnectionState = .new {
+ didSet {
+ guard iceConnectionState != oldValue else {
+ return
+ }
+ delegate?.peerConnection(self, iceConnectionStateChanged: iceConnectionState)
+ }
+ }
+ /// The current state of ice gathering.
+ public private(set) var iceGatheringState: IceGatheringState = .new {
+ didSet {
+ guard iceGatheringState != oldValue else {
+ return
+ }
+ delegate?.peerConnection(self, iceGatheringStateChanged: iceGatheringState)
+ }
+ }
+ /// The current state of signaling.
+ public private(set) var signalingState: SignalingState = .stable {
+ didSet {
+ guard signalingState != oldValue else {
+ return
+ }
+ delegate?.peerConnection(self, signalingStateChanged: signalingState)
+ }
+ }
+ /// Optional callback for receiving compressed video directly from opened tracks.
+ ///
+ /// When set, video tracks will deliver compressed `CMSampleBuffer`s to this callback
+ /// instead of routing through `IncomingStream`. Audio tracks still use `incomingStream`.
+ /// This enables the caller to handle video decode and PTS retiming externally
+ /// (matching the pattern used by RTMP/RTSP ingest paths).
+ public var onCompressedVideo: ((CMSampleBuffer) -> Void)?
+
+ private let connection: Int32
+ private(set) var localDescription: String = ""
+ private weak var incomingStream: RTCStream?
+ private var managedTrackIds: Set<Int32> = []
+ private var retainedTracks: [RTCTrack] = []
+ private var callbackDelegates: [Any] = []
+
+ /// The current local SDP generated by the peer connection.
+ ///
+ /// This is updated asynchronously after calling `setLocalDesciption(_:)`.
+ public var localDescriptionSdp: String {
+ localDescription
+ }
+
+ /// Creates a peerConnection instance.
+ public init(_ config: (any RTCConfigurationConvertible)? = nil) throws {
+ if let config {
+ connection = config.createPeerConnection()
+ } else {
+ connection = RTCConfiguration.empty.createPeerConnection()
+ }
+ try RTCError.check(connection)
+ do {
+ try RTCError.check(rtcSetLocalDescriptionCallback(connection) { _, sdp, _, pointer in
+ guard let pointer else { return }
+ if let sdp {
+ Unmanaged<RTCPeerConnection>.fromOpaque(pointer).takeUnretainedValue().localDescription = String(cString: sdp)
+ }
+ })
+ try RTCError.check(rtcSetLocalCandidateCallback(connection) { _, candidate, mid, pointer in
+ guard let pointer else { return }
+ Unmanaged<RTCPeerConnection>.fromOpaque(pointer).takeUnretainedValue().didGenerateCandidate(.init(
+ candidate: candidate,
+ mid: mid
+ ))
+ })
+ try RTCError.check(rtcSetStateChangeCallback(connection) { _, state, pointer in
+ guard let pointer else { return }
+ if let state = ConnectionState(cValue: state) {
+ Unmanaged<RTCPeerConnection>.fromOpaque(pointer).takeUnretainedValue().connectionState = state
+ }
+ })
+ try RTCError.check(rtcSetIceStateChangeCallback(connection) { _, state, pointer in
+ guard let pointer else { return }
+ if let state = IceConnectionState(cValue: state) {
+ Unmanaged<RTCPeerConnection>.fromOpaque(pointer).takeUnretainedValue().iceConnectionState = state
+ }
+ })
+ try RTCError.check(rtcSetGatheringStateChangeCallback(connection) { _, gatheringState, pointer in
+ guard let pointer else { return }
+ if let gatheringState = IceGatheringState(cValue: gatheringState) {
+ Unmanaged<RTCPeerConnection>.fromOpaque(pointer).takeUnretainedValue().iceGatheringState = gatheringState
+ }
+ })
+ try RTCError.check(rtcSetSignalingStateChangeCallback(connection) { _, signalingState, pointer in
+ guard let pointer else { return }
+ if let signalingState = SignalingState(cValue: signalingState) {
+ Unmanaged<RTCPeerConnection>.fromOpaque(pointer).takeUnretainedValue().signalingState = signalingState
+ }
+ })
+ try RTCError.check(rtcSetTrackCallback(connection) { _, track, pointer in
+ guard let pointer else { return }
+ let pc = Unmanaged<RTCPeerConnection>.fromOpaque(pointer).takeUnretainedValue()
+ // If this track ID was already created in addTransceiver, skip creating a
+ // duplicate RTCTrack. Creating a second RTCTrack for the same ID would
+ // overwrite libdatachannel callbacks and then deallocate, deleting the track.
+ guard !pc.managedTrackIds.contains(track) else {
+ return
+ }
+ if let newTrack = try? RTCTrack(id: track) {
+ pc.retainedTracks.append(newTrack)
+ pc.didOpenTrack(newTrack)
+ }
+ })
+ try RTCError.check(rtcSetDataChannelCallback(connection) { _, dataChannel, pointer in
+ guard let pointer else { return }
+ if let channel = try? RTCDataChannel(id: dataChannel) {
+ Unmanaged<RTCPeerConnection>.fromOpaque(pointer).takeUnretainedValue().didOpenDataChannel(channel)
+ }
+ })
+ rtcSetUserPointer(connection, Unmanaged.passUnretained(self).toOpaque())
+ } catch {
+ rtcDeletePeerConnection(connection)
+ throw error
+ }
+ }
+
+ /// Attaches an ``RTCStream`` to receive incoming media.
+ ///
+ /// When remote tracks open, they will be bound to the stream so decoded
+ /// audio/video can be forwarded via the stream's outputs.
+ public func attachIncomingStream(_ stream: RTCStream) {
+ incomingStream = stream
+ }
+
+ deinit {
+ close()
+ rtcDeletePeerConnection(connection)
+ }
+
+ /// Adds a `MediaStreamTrack` to the peer connection and associates it with the given `MediaStream`.
+ ///
+ /// - Parameters:
+ /// - track: The media track to add (audio or video).
+ /// - stream: The `MediaStream` that the track belongs to.
+ public func addTrack(_ track: some RTCStreamTrack, stream: RTCStream) throws {
+ let msid = stream.id
+ switch track {
+ case let track as AudioStreamTrack:
+ let config = RTCTrackConfiguration(mid: "0", streamId: msid, audioCodecSettings: track.settings)
+ let id = try config.addTrack(connection, direction: .sendrecv)
+ Task {
+ await stream.addTrack(try RTCSendableStreamTrack(id, id: track.id))
+ }
+ case let track as VideoStreamTrack:
+ let config = RTCTrackConfiguration(mid: "1", streamId: msid, videoCodecSettings: track.settings)
+ let id = try config.addTrack(connection, direction: .sendrecv)
+ Task {
+ await stream.addTrack(try RTCSendableStreamTrack(id, id: track.id))
+ }
+ default:
+ break
+ }
+ }
+
+ /// Adds a recvonly transceiver for the given kind, and binds it to the stream.
+ ///
+ /// This is used for receiving media from a remote publisher (ingest).
+ /// The track is retained internally to prevent deallocation (which would call rtcDeleteTrack).
+ public func addRecvonlyTransceiver(_ kind: RTCStreamKind, stream: RTCStream) throws {
+ let track = try addTransceiver(kind, stream: stream)
+ retainedTracks.append(track)
+ }
+
+ @discardableResult
+ func addTransceiver(_ kind: RTCStreamKind, stream: RTCStream) throws -> RTCTrack {
+ let sdp: String
+ switch kind {
+ case .audio:
+ sdp = Self.audioMediaDescription
+ case .video:
+ sdp = Self.videoMediaDescription
+ }
+ let result = try RTCError.check(sdp.withCString { cString in
+ rtcAddTrack(connection, cString)
+ })
+ managedTrackIds.insert(result)
+ let track = try RTCTrack(id: result)
+ track.delegate = stream
+ return track
+ }
+
+ public func setRemoteDesciption(_ sdp: String, type: SDPSessionDescriptionType) throws {
+ logger.debug(sdp, type.rawValue)
+ try RTCError.check([sdp, type.rawValue].withCStrings { cStrings in
+ rtcSetRemoteDescription(connection, cStrings[0], cStrings[1])
+ })
+ }
+
+ /// Adds a trickled remote ICE candidate.
+ ///
+ /// - Parameters:
+ /// - candidate: SDP candidate line (with or without the `a=` prefix).
+ /// - mid: Optional mid value. Pass `nil` to let libdatachannel autodetect.
+ public func addRemoteCandidate(_ candidate: String, mid: String? = nil) throws {
+ try RTCError.check([candidate, mid ?? ""].withCStrings { cStrings in
+ if mid == nil {
+ return rtcAddRemoteCandidate(connection, cStrings[0], nil)
+ } else {
+ return rtcAddRemoteCandidate(connection, cStrings[0], cStrings[1])
+ }
+ })
+ }
+
+ public func setLocalDesciption(_ type: SDPSessionDescriptionType) throws {
+ logger.debug(type.rawValue)
+ try RTCError.check([type.rawValue].withCStrings { cStrings in
+ rtcSetLocalDescription(connection, cStrings[0])
+ })
+ }
+
+ public func createOffer() throws -> String {
+ return try CUtil.getString { buffer, size in
+ rtcCreateOffer(connection, buffer, size)
+ }
+ }
+
+ public func createAnswer() throws -> String {
+ return try CUtil.getString { buffer, size in
+ rtcCreateAnswer(connection, buffer, size)
+ }
+ }
+
+ public func createDataChannel(_ label: String) throws -> RTCDataChannel {
+ let result = try RTCError.check([label].withCStrings { cStrings in
+ rtcCreateDataChannel(connection, cStrings[0])
+ })
+ return try RTCDataChannel(id: result)
+ }
+
+ public func close() {
+ do {
+ try RTCError.check(rtcClosePeerConnection(connection))
+ } catch {
+ logger.warn(error)
+ }
+ }
+
+ private func didGenerateCandidate(_ candidate: RTCIceCandidate) {
+ delegate?.peerConnection(self, gotIceCandidate: candidate)
+ }
+
+ private func didOpenTrack(_ track: RTCTrack) {
+ logger.info(track)
+ // Route video tracks to the external callback (if set) for direct decode,
+ // and audio tracks to the RTCStream/IncomingStream path.
+ if let onCompressedVideo, track.description.lowercased().contains("m=video") {
+ let delegate = VideoCallbackTrackDelegate(onCompressedVideo)
+ callbackDelegates.append(delegate)
+ track.delegate = delegate
+ } else if let incomingStream {
+ track.delegate = incomingStream
+ }
+ }
+
+ private func didOpenDataChannel(_ dataChannel: RTCDataChannel) {
+ delegate?.peerConnection(self, didOpen: dataChannel)
+ }
+}
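+
+ // Rough WHIP-style publish flow against this API. Illustrative only: `stream`,
+ // `videoSettings` and `postOfferToWhipEndpoint` are placeholders, and real code
+ // needs error handling plus a gathering strategy:
+ //
+ // let pc = try RTCPeerConnection(RTCConfiguration())
+ // try pc.addTrack(VideoStreamTrack(videoSettings), stream: stream)
+ // try pc.setLocalDesciption(.offer)
+ // // ...wait until iceGatheringState == .complete via the delegate...
+ // let answer = try await postOfferToWhipEndpoint(pc.localDescriptionSdp)
+ // try pc.setRemoteDesciption(answer, type: .answer)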
+
+/// Routes compressed video from an RTCTrack directly to a callback,
+/// bypassing IncomingStream/VideoCodec/MediaLink.
+private class VideoCallbackTrackDelegate: RTCTrackDelegate {
+ let callback: (CMSampleBuffer) -> Void
+
+ init(_ callback: @escaping (CMSampleBuffer) -> Void) {
+ self.callback = callback
+ }
+
+ func track(_ track: RTCTrack, readyStateChanged readyState: RTCTrack.ReadyState) {}
+
+ func track(_ track: RTCTrack, didOutput buffer: CMSampleBuffer) {
+ callback(buffer)
+ }
+
+ func track(_ track: RTCTrack, didOutput buffer: AVAudioCompressedBuffer, when: AVAudioTime) {
+ // Audio is handled by IncomingStream via the RTCStream path.
+ }
+}
+
+extension RTCPeerConnection.ConnectionState {
+ init?(cValue: rtcState) {
+ switch cValue {
+ case RTC_NEW:
+ self = .new
+ case RTC_CONNECTING:
+ self = .connecting
+ case RTC_CONNECTED:
+ self = .connected
+ case RTC_DISCONNECTED:
+ self = .disconnected
+ case RTC_FAILED:
+ self = .failed
+ case RTC_CLOSED:
+ self = .closed
+ default:
+ return nil
+ }
+ }
+}
+
+extension RTCPeerConnection.IceGatheringState {
+ init?(cValue: rtcGatheringState) {
+ switch cValue {
+ case RTC_GATHERING_NEW:
+ self = .new
+ case RTC_GATHERING_INPROGRESS:
+ self = .inProgress
+ case RTC_GATHERING_COMPLETE:
+ self = .complete
+ default:
+ return nil
+ }
+ }
+}
+
+extension RTCPeerConnection.IceConnectionState {
+ init?(cValue: rtcIceState) {
+ switch cValue {
+ case RTC_ICE_NEW:
+ self = .new
+ case RTC_ICE_CHECKING:
+ self = .checking
+ case RTC_ICE_CONNECTED:
+ self = .connected
+ case RTC_ICE_COMPLETED:
+ self = .completed
+ case RTC_ICE_FAILED:
+ self = .failed
+ case RTC_ICE_DISCONNECTED:
+ self = .disconnected
+ case RTC_ICE_CLOSED:
+ self = .closed
+ default:
+ return nil
+ }
+ }
+}
+
+extension RTCPeerConnection.SignalingState {
+ init?(cValue: rtcSignalingState) {
+ switch cValue {
+ case RTC_SIGNALING_STABLE:
+ self = .stable
+ case RTC_SIGNALING_HAVE_LOCAL_OFFER:
+ self = .haveLocalOffer
+ case RTC_SIGNALING_HAVE_REMOTE_OFFER:
+ self = .haveRemoteOffer
+ case RTC_SIGNALING_HAVE_LOCAL_PRANSWER:
+ self = .haveLocalPRAnswer
+ case RTC_SIGNALING_HAVE_REMOTE_PRANSWER:
+ self = .haveRemotePRAnswer
+ default:
+ return nil
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCSendableStreamTrack.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCSendableStreamTrack.swift
new file mode 100644
index 000000000..bf2daa07a
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCSendableStreamTrack.swift
@@ -0,0 +1,25 @@
+import AVFoundation
+import HaishinKit
+import libdatachannel
+
+actor RTCSendableStreamTrack: RTCStreamTrack {
+ let id: String
+ private let track: RTCTrack
+
+ init(_ tid: Int32, id: String) throws {
+ track = try RTCTrack(id: tid)
+ self.id = id
+ }
+
+ func send(_ buffer: CMSampleBuffer) {
+ track.send(buffer)
+ }
+
+ func send(_ buffer: AVAudioCompressedBuffer, when: AVAudioTime) {
+ track.send(buffer, when: when)
+ }
+
+ func setDelegate(_ delegate: some RTCTrackDelegate) {
+ track.delegate = delegate
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCStream.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCStream.swift
new file mode 100644
index 000000000..5ce2be587
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCStream.swift
@@ -0,0 +1,186 @@
+import AVFoundation
+import HaishinKit
+import libdatachannel
+
+public actor RTCStream {
+ enum Error: Swift.Error {
+ case unsupportedCodec
+ }
+
+ static let supportedAudioCodecs: [AudioCodecSettings.Format] = [.opus]
+ static let supportedVideoCodecs: [VideoCodecSettings.Format] = [.h264]
+
+ let id: String = UUID().uuidString
+ private(set) var tracks: [RTCSendableStreamTrack] = []
+ public private(set) var readyState: StreamReadyState = .idle
+ public private(set) var videoTrackId: UInt8? = UInt8.max
+ public private(set) var audioTrackId: UInt8? = UInt8.max
+ package lazy var incoming = IncomingStream(self)
+ package lazy var outgoing: OutgoingStream = {
+ var stream = OutgoingStream()
+ stream.audioSettings = .init(channelMap: [0, 0], format: .opus)
+ return stream
+ }()
+ package var outputs: [any StreamOutput] = []
+ package var bitRateStrategy: (any StreamBitRateStrategy)?
+ private var direction: RTCDirection = .sendonly
+
+ public init() {
+ }
+
+ public func addOutput(_ output: any StreamOutput) {
+ outputs.append(output)
+ }
+
+ public func removeAllOutputs() {
+ outputs.removeAll()
+ }
+
+ public func setDirection(_ direction: RTCDirection) {
+ self.direction = direction
+ switch direction {
+ case .recvonly:
+ Task {
+ await incoming.startRunning()
+ }
+ case .sendonly, .sendrecv:
+ outgoing.startRunning()
+ Task {
+ for await audio in outgoing.audioOutputStream {
+ append(audio.0, when: audio.1)
+ }
+ }
+ Task {
+ for await video in outgoing.videoOutputStream {
+ append(video)
+ }
+ }
+ Task {
+ for await video in outgoing.videoInputStream {
+ outgoing.append(video: video)
+ }
+ }
+ default:
+ break
+ }
+ }
+
+ public func close() async {
+ tracks.removeAll()
+ switch direction {
+ case .sendonly, .sendrecv:
+ outgoing.stopRunning()
+ case .recvonly:
+ await incoming.stopRunning()
+ default:
+ break
+ }
+ }
+
+ func addTrack(_ track: RTCSendableStreamTrack) async {
+ await track.setDelegate(self)
+ tracks.append(track)
+ }
+}
+
+extension RTCStream: _Stream {
+ public func setAudioSettings(_ audioSettings: AudioCodecSettings) throws {
+ guard Self.supportedAudioCodecs.contains(audioSettings.format) else {
+ throw Error.unsupportedCodec
+ }
+ outgoing.audioSettings = audioSettings
+ }
+
+ public func setVideoSettings(_ videoSettings: VideoCodecSettings) throws {
+ guard Self.supportedVideoCodecs.contains(videoSettings.format) else {
+ throw Error.unsupportedCodec
+ }
+ outgoing.videoSettings = videoSettings
+ }
+
+ public func append(_ sampleBuffer: CMSampleBuffer) {
+ switch sampleBuffer.formatDescription?.mediaType {
+ case .video:
+ if sampleBuffer.formatDescription?.isCompressed == true {
+ Task {
+ for track in tracks {
+ await track.send(sampleBuffer)
+ }
+ }
+ } else {
+ outgoing.append(sampleBuffer)
+ outputs.forEach { $0.stream(self, didOutput: sampleBuffer) }
+ }
+ case .audio:
+ if sampleBuffer.formatDescription?.isCompressed == true {
+ Task { await incoming.append(sampleBuffer) }
+ } else {
+ outgoing.append(sampleBuffer)
+ }
+ default:
+ break
+ }
+ }
+
+ public func append(_ audioBuffer: AVAudioBuffer, when: AVAudioTime) {
+ switch audioBuffer {
+ case let audioBuffer as AVAudioPCMBuffer:
+ outgoing.append(audioBuffer, when: when)
+ outputs.forEach { $0.stream(self, didOutput: audioBuffer, when: when) }
+ case let audioBuffer as AVAudioCompressedBuffer:
+ Task {
+ for track in tracks {
+ await track.send(audioBuffer, when: when)
+ }
+ }
+ default:
+ break
+ }
+ }
+
+ public func dispatch(_ event: NetworkMonitorEvent) async {
+ await bitRateStrategy?.adjustBitrate(event, stream: self)
+ }
+}
+
+extension RTCStream: RTCTrackDelegate {
+ // MARK: RTCTrackDelegate
+ nonisolated func track(_ track: RTCTrack, readyStateChanged readyState: RTCTrack.ReadyState) {
+ }
+
+ nonisolated func track(_ track: RTCTrack, didOutput buffer: CMSampleBuffer) {
+ Task {
+ await incoming.append(buffer)
+ }
+ }
+
+ nonisolated func track(_ track: RTCTrack, didOutput buffer: AVAudioCompressedBuffer, when: AVAudioTime) {
+ Task {
+ await incoming.append(buffer, when: when)
+ }
+ }
+}
+
+extension RTCStream: MediaMixerOutput {
+ // MARK: MediaMixerOutput
+ public func selectTrack(_ id: UInt8?, mediaType: CMFormatDescription.MediaType) {
+ switch mediaType {
+ case .audio:
+ audioTrackId = id
+ case .video:
+ videoTrackId = id
+ default:
+ break
+ }
+ }
+
+ nonisolated public func mixer(_ mixer: MediaMixer, didOutput sampleBuffer: CMSampleBuffer) {
+ Task { await append(sampleBuffer) }
+ }
+
+ nonisolated public func mixer(_ mixer: MediaMixer, didOutput buffer: AVAudioPCMBuffer, when: AVAudioTime) {
+ Task { await append(buffer, when: when) }
+ }
+}
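+
+ // Receiving (WHEP-style) wiring in a sketch; `output` stands in for any
+ // StreamOutput conformer:
+ //
+ // let stream = RTCStream()
+ // await stream.addOutput(output)
+ // await stream.setDirection(.recvonly)
+ // let pc = try RTCPeerConnection()
+ // pc.attachIncomingStream(stream)
+ // try pc.addRecvonlyTransceiver(.video, stream: stream)
+ // try pc.addRecvonlyTransceiver(.audio, stream: stream)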
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCStreamKind.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCStreamKind.swift
new file mode 100644
index 000000000..63c7301cf
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCStreamKind.swift
@@ -0,0 +1,4 @@
+public enum RTCStreamKind {
+ case audio
+ case video
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCStreamTrack.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCStreamTrack.swift
new file mode 100644
index 000000000..51a07885f
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCStreamTrack.swift
@@ -0,0 +1,29 @@
+import AVFAudio
+import CoreMedia
+import Foundation
+import HaishinKit
+import libdatachannel
+
+public protocol RTCStreamTrack: Sendable {
+ var id: String { get }
+}
+
+public struct AudioStreamTrack: RTCStreamTrack, Sendable {
+ public let id: String
+ public let settings: AudioCodecSettings
+
+ public init(_ settings: AudioCodecSettings) {
+ self.id = UUID().uuidString
+ self.settings = settings
+ }
+}
+
+public struct VideoStreamTrack: RTCStreamTrack, Sendable {
+ public let id: String
+ public let settings: VideoCodecSettings
+
+ public init(_ settings: VideoCodecSettings) {
+ self.id = UUID().uuidString
+ self.settings = settings
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCTrack.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCTrack.swift
new file mode 100644
index 000000000..ce9ee4ebc
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCTrack.swift
@@ -0,0 +1,180 @@
+import AVFAudio
+import CoreMedia
+import Foundation
+import libdatachannel
+
+protocol RTCTrackDelegate: AnyObject {
+ func track(_ track: RTCTrack, readyStateChanged readyState: RTCTrack.ReadyState)
+ func track(_ track: RTCTrack, didOutput buffer: CMSampleBuffer)
+ func track(_ track: RTCTrack, didOutput buffer: AVAudioCompressedBuffer, when: AVAudioTime)
+}
+
+class RTCTrack: RTCChannel {
+ enum ReadyState {
+ case connecting
+ case open
+ case closing
+ case closed
+ }
+
+ let id: Int32
+ weak var delegate: (any RTCTrackDelegate)?
+
+ var mid: String {
+ do {
+ return try CUtil.getString { buffer, size in
+ rtcGetTrackMid(id, buffer, size)
+ }
+ } catch {
+ logger.warn(error)
+ return ""
+ }
+ }
+
+ var description: String {
+ do {
+ return try CUtil.getString { buffer, size in
+ rtcGetTrackDescription(id, buffer, size)
+ }
+ } catch {
+ logger.warn(error)
+ return ""
+ }
+ }
+
+ var ssrc: UInt32 {
+ do {
+ return try CUtil.getUInt32 { buffer, size in
+ rtcGetSsrcsForTrack(id, buffer, size)
+ }
+ } catch {
+ logger.warn(error)
+ return 0
+ }
+ }
+
+ private(set) var readyState: ReadyState = .connecting {
+ didSet {
+ switch readyState {
+ case .connecting:
+ break
+ case .open:
+ do {
+ packetizer = try makePacketizer()
+ } catch {
+ logger.warn(error)
+ }
+ case .closing:
+ break
+ case .closed:
+ break
+ }
+ delegate?.track(self, readyStateChanged: readyState)
+ }
+ }
+
+ private var packetizer: (any RTPPacketizer)?
+
+ init(id: Int32) throws {
+ self.id = id
+ try RTCError.check(id)
+ do {
+ rtcSetUserPointer(id, Unmanaged.passUnretained(self).toOpaque())
+ try RTCError.check(rtcSetOpenCallback(id) { _, pointer in
+ guard let pointer else { return }
+ Unmanaged<RTCTrack>.fromOpaque(pointer).takeUnretainedValue().readyState = .open
+ })
+ try RTCError.check(rtcSetClosedCallback(id) { _, pointer in
+ guard let pointer else { return }
+ Unmanaged<RTCTrack>.fromOpaque(pointer).takeUnretainedValue().readyState = .closed
+ })
+ try RTCError.check(rtcSetMessageCallback(id) { _, bytes, size, pointer in
+ guard let bytes, let pointer else { return }
+ if 0 <= size {
+ let data = Data(bytes: bytes, count: Int(size))
+ Unmanaged<RTCTrack>.fromOpaque(pointer).takeUnretainedValue().didReceiveMessage(data)
+ }
+ })
+ try RTCError.check(rtcSetErrorCallback(id) { _, error, pointer in
+ guard let error, let pointer else { return }
+ Unmanaged<RTCTrack>.fromOpaque(pointer).takeUnretainedValue().errorOccurred(String(cString: error))
+ })
+ } catch {
+ rtcDeleteTrack(id)
+ throw error
+ }
+ }
+
+ deinit {
+ rtcDeleteTrack(id)
+ }
+
+ func send(_ buffer: CMSampleBuffer) {
+ packetizer?.append(buffer) { packet in
+ try? send(packet.data)
+ }
+ }
+
+ func send(_ buffer: AVAudioCompressedBuffer, when: AVAudioTime) {
+ packetizer?.append(buffer, when: when) { packet in
+ try? send(packet.data)
+ }
+ }
+
+ func didReceiveMessage(_ message: Data) {
+ do {
+ let packet = try RTPPacket(message)
+ packetizer?.append(packet)
+ } catch {
+ logger.warn(error)
+ }
+ }
+
+ private func errorOccurred(_ error: String) {
+ logger.warn(error)
+ }
+
+ private func makePacketizer() throws -> (any RTPPacketizer)? {
+ let description = try SDPMediaDescription(sdp: description)
+ var result: (any RTPPacketizer)?
+ let rtpmap = description.attributes.compactMap { attr -> (UInt8, String, Int, Int?)? in
+ if case let .rtpmap(payload, codec, clock, channel) = attr { return (payload, codec, clock, channel) }
+ return nil
+ }
+ guard !rtpmap.isEmpty else {
+ return nil
+ }
+ switch rtpmap[0].1.lowercased() {
+ case "opus":
+ let packetizer = RTPOpusPacketizer<RTCTrack>(ssrc: ssrc, payloadType: description.payload)
+ packetizer.delegate = self
+ result = packetizer
+ case "h264":
+ let packetizer = RTPH264Packetizer<RTCTrack>(ssrc: ssrc, payloadType: description.payload)
+ packetizer.delegate = self
+ result = packetizer
+ default:
+ break
+ }
+ for attribute in description.attributes {
+ switch attribute {
+ case .fmtp(_, let params):
+ result?.formatParameter = RTPFormatParameter(params)
+ default:
+ break
+ }
+ }
+ return result
+ }
+}
+
+extension RTCTrack: RTPPacketizerDelegate {
+ // MARK: RTPPacketizerDelegate
+ func packetizer(_ packetizer: some RTPPacketizer, didOutput buffer: CMSampleBuffer) {
+ delegate?.track(self, didOutput: buffer)
+ }
+
+ func packetizer(_ packetizer: some RTPPacketizer, didOutput buffer: AVAudioCompressedBuffer, when: AVAudioTime) {
+ delegate?.track(self, didOutput: buffer, when: when)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCTrackConfiguration.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCTrackConfiguration.swift
new file mode 100644
index 000000000..8c356e0b6
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCTrackConfiguration.swift
@@ -0,0 +1,73 @@
+import Foundation
+import HaishinKit
+import libdatachannel
+
+struct RTCTrackConfiguration: Sendable {
+ private static func generateSSRC() -> UInt32 {
+ // Zero is not a valid SSRC, so draw from the non-zero range directly.
+ return UInt32.random(in: 1...UInt32.max)
+ }
+
+ private static func generateCName() -> String {
+ return String(UUID().uuidString.replacingOccurrences(of: "-", with: "").prefix(16))
+ }
+
+ let codec: rtcCodec
+ let ssrc: UInt32
+ let pt: Int32
+ let mid: String
+ let name: String
+ let msid: String
+ let trackId: String
+ let profile: String?
+}
+
+extension RTCTrackConfiguration {
+ init(mid: String, streamId: String, audioCodecSettings: AudioCodecSettings) {
+ self.codec = audioCodecSettings.format.cValue ?? RTC_CODEC_OPUS
+ self.ssrc = Self.generateSSRC()
+ self.pt = 111
+ self.mid = mid
+ self.name = Self.generateCName()
+ self.msid = streamId
+ self.trackId = UUID().uuidString
+ self.profile = "minptime=10;useinbandfec=1;stereo=1;sprop-stereo=1"
+ }
+
+ init(mid: String, streamId: String, videoCodecSettings: VideoCodecSettings) {
+ self.codec = videoCodecSettings.format.cValue
+ self.ssrc = Self.generateSSRC()
+ self.pt = 98
+ self.mid = mid
+ self.name = Self.generateCName()
+ self.msid = streamId
+ self.trackId = UUID().uuidString
+ self.profile = nil
+ }
+}
+
+extension RTCTrackConfiguration {
+ func addTrack(_ connection: Int32, direction: RTCDirection) throws -> Int32 {
+ var rtcTrackInit = makeRtcTrackInit(direction)
+ let result = try RTCError.check(rtcAddTrackEx(connection, &rtcTrackInit))
+ return result
+ }
+
+ private func makeRtcTrackInit(_ direction: RTCDirection) -> rtcTrackInit {
+ // TODO: Fix memory leak
+ return rtcTrackInit(
+ direction: direction.cValue,
+ codec: codec,
+ payloadType: pt,
+ ssrc: ssrc,
+ mid: strdup(mid),
+ name: strdup(name),
+ msid: strdup(msid),
+ trackId: strdup(trackId),
+ profile: profile == nil ? nil : strdup(profile)
+ )
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCTransportPolicy.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCTransportPolicy.swift
new file mode 100644
index 000000000..3cfbbddc6
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCTransportPolicy.swift
@@ -0,0 +1,17 @@
+import libdatachannel
+
+public enum RTCTransportPolicy: Sendable, Encodable {
+ case all
+ case relay
+}
+
+extension RTCTransportPolicy {
+ var cValue: rtcTransportPolicy {
+ switch self {
+ case .all:
+ return RTC_TRANSPORT_POLICY_ALL
+ case .relay:
+ return RTC_TRANSPORT_POLICY_RELAY
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPFormatParameter.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPFormatParameter.swift
new file mode 100644
index 000000000..22514e53b
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPFormatParameter.swift
@@ -0,0 +1,40 @@
+import Foundation
+
+@dynamicMemberLookup
+struct RTPFormatParameter: Sendable {
+ static let empty = RTPFormatParameter()
+
+ private let data: [String: String]
+
+ subscript(dynamicMember key: String) -> Int? {
+ guard let value = data[key] else {
+ return nil
+ }
+ return Int(value)
+ }
+
+ subscript(dynamicMember key: String) -> Bool {
+ guard let value = data[key] else {
+ return false
+ }
+ return value == "1" || value == "true"
+ }
+}
+
+extension RTPFormatParameter {
+ init() {
+ self.data = [:]
+ }
+
+ init(_ value: String) {
+ var data: [String: String] = [:]
+ let pairs = value.split(separator: ";")
+ for pair in pairs {
+ let parts = pair.split(separator: "=", maxSplits: 1).map { $0.trimmingCharacters(in: .whitespaces) }
+ if parts.count == 2 {
+ data[parts[0]] = parts[1]
+ }
+ }
+ self.data = data
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPH264Packetizer.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPH264Packetizer.swift
new file mode 100644
index 000000000..4dd428955
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPH264Packetizer.swift
@@ -0,0 +1,317 @@
+import AVFAudio
+import CoreMedia
+import Foundation
+import HaishinKit
+
+private let RTPH264Packetizer_startCode = Data([0x00, 0x00, 0x00, 0x01])
+
+/// https://datatracker.ietf.org/doc/html/rfc3984
+final class RTPH264Packetizer<T: RTPPacketizerDelegate>: RTPPacketizer {
+ let ssrc: UInt32
+ let payloadType: UInt8
+ weak var delegate: T?
+ var formatParameter = RTPFormatParameter()
+ private var sequenceNumber: UInt16 = 0
+ private var buffer = Data()
+ private var nalUnitReader = NALUnitReader()
+ private var pictureParameterSets: Data?
+ private var sequenceParameterSets: Data?
+ private var formatDescription: CMFormatDescription?
+
+ // for FragmentUnitA
+ private var fragmentedBuffer = Data()
+ private var fragmentedStarted = false
+ private var fragmentedTimestamp: UInt32 = 0
+ private var timestamp: RTPTimestamp = .init(90000)
+
+ private lazy var jitterBuffer: RTPJitterBuffer<RTPH264Packetizer<T>> = {
+ let jitterBuffer = RTPJitterBuffer()
+ jitterBuffer.delegate = self
+ return jitterBuffer
+ }()
+
+ init(ssrc: UInt32, payloadType: UInt8) {
+ self.ssrc = ssrc
+ self.payloadType = payloadType
+ }
+
+ func append(_ packet: RTPPacket) {
+ jitterBuffer.append(packet)
+ }
+
+ func append(_ buffer: CMSampleBuffer, lambda: (RTPPacket) -> Void) {
+ // Assumes each element returned by the reader carries a complete NAL unit in `data`.
+ let nals = nalUnitReader.read(buffer)
+ guard !nals.isEmpty else {
+ return
+ }
+ let rtpTimestamp = self.timestamp.convert(buffer.presentationTimeStamp)
+ for i in 0..<nals.count {
+ // The RTP marker bit is set on the final packet of the access unit.
+ packetize(nals[i].data, timestamp: rtpTimestamp, marker: i == nals.count - 1, lambda: lambda)
+ }
+ }
+
+ func append(_ buffer: AVAudioCompressedBuffer, when: AVAudioTime, lambda: (RTPPacket) -> Void) {
+ }
+
+ /// Emits one NAL unit as a Single NAL Unit packet when it fits, otherwise as
+ /// FU-A fragments kept under a conservative payload budget.
+ /// - SeeAlso: RFC 3984 sections 5.6 and 5.8
+ private func packetize(_ nalUnit: Data, timestamp: UInt32, marker: Bool, lambda: (RTPPacket) -> Void) {
+ let maxPayloadSize = 1200
+ guard !nalUnit.isEmpty else {
+ return
+ }
+ if nalUnit.count <= maxPayloadSize {
+ lambda(makePacket(nalUnit, timestamp: timestamp, marker: marker))
+ return
+ }
+ let header = nalUnit[nalUnit.startIndex]
+ let fuIndicator = (header & 0b11100000) | 28
+ var fuHeader = (header & 0b00011111) | 0b10000000
+ var offset = nalUnit.startIndex + 1
+ while offset < nalUnit.endIndex {
+ let end = min(offset + maxPayloadSize - 2, nalUnit.endIndex)
+ if end == nalUnit.endIndex {
+ fuHeader |= 0b01000000
+ }
+ var payload = Data([fuIndicator, fuHeader])
+ payload.append(nalUnit[offset..<end])
+ lambda(makePacket(payload, timestamp: timestamp, marker: marker && end == nalUnit.endIndex))
+ // Clear the start bit after the first fragment.
+ fuHeader &= 0b01111111
+ offset = end
+ }
+ }
+
+ private func makePacket(_ payload: Data, timestamp: UInt32, marker: Bool) -> RTPPacket {
+ defer {
+ sequenceNumber &+= 1
+ }
+ return RTPPacket(
+ version: RTPPacket.version,
+ padding: false,
+ extension: false,
+ cc: 0,
+ marker: marker,
+ payloadType: payloadType,
+ sequenceNumber: sequenceNumber,
+ timestamp: timestamp,
+ ssrc: ssrc,
+ payload: payload
+ )
+ }
+
+ private func decode(_ packet: RTPPacket) {
+ guard !packet.payload.isEmpty else {
+ return
+ }
+ let nalUnitType = packet.payload[0] & 0x1F
+ switch nalUnitType {
+ case 1...23:
+ decodeSingleNALUnit(packet)
+ case 24:
+ decodeStapA(packet)
+ case 28:
+ decodeFragmentUnitA(packet)
+ default:
+ logger.warn("undefined nal unit type = ", nalUnitType)
+ }
+ }
+
+ /// STAP-A (Single-Time Aggregation Packet)
+ /// - SeeAlso: RFC 3984 section 5.7.1
+ private func decodeStapA(_ packet: RTPPacket) {
+ // payload[0] = STAP-A indicator (F | NRI | Type=24)
+ // then: [NALU-size:16][NALU-data] repeating
+ let payload = packet.payload
+ guard payload.count >= 1 + 2 else {
+ return
+ }
+
+ var offset = 1
+ var nalUnits: [Data] = []
+ nalUnits.reserveCapacity(4)
+
+ while offset + 2 <= payload.count {
+ let size = (UInt16(payload[offset]) << 8) | UInt16(payload[offset + 1])
+ offset += 2
+ guard size > 0 else {
+ logger.warn("decodeStapA(_:) > invalid nalu size = 0")
+ break
+ }
+ let end = offset + Int(size)
+ guard end <= payload.count else {
+ logger.warn("decodeStapA(_:) > bufferUnderrun")
+ break
+ }
+ nalUnits.append(Data(payload[offset.. CMSampleBuffer? {
+ guard formatDescription != nil else {
+ return nil
+ }
+ let presentationTimeStamp: CMTime = self.timestamp.convert(timestamp)
+ let units = nalUnitReader.read(&buffer, type: H264NALUnit.self)
+ var blockBuffer: CMBlockBuffer?
+ ISOTypeBufferUtil.toNALFileFormat(&buffer)
+ blockBuffer = buffer.makeBlockBuffer()
+ var sampleSizes: [Int] = []
+ var sampleBuffer: CMSampleBuffer?
+ var timing = CMSampleTimingInfo(
+ duration: .invalid,
+ presentationTimeStamp: presentationTimeStamp,
+ decodeTimeStamp: .invalid
+ )
+ sampleSizes.append(buffer.count)
+ guard let blockBuffer, CMSampleBufferCreate(
+ allocator: kCFAllocatorDefault,
+ dataBuffer: blockBuffer,
+ dataReady: true,
+ makeDataReadyCallback: nil,
+ refcon: nil,
+ formatDescription: formatDescription,
+ sampleCount: sampleSizes.count,
+ sampleTimingEntryCount: 1,
+ sampleTimingArray: &timing,
+ sampleSizeEntryCount: sampleSizes.count,
+ sampleSizeArray: &sampleSizes,
+ sampleBufferOut: &sampleBuffer) == noErr else {
+ return nil
+ }
+ sampleBuffer?.isNotSync = !units.contains { $0.type == .idr }
+ return sampleBuffer
+ }
+
+ private func makeFormatDescription() -> CMFormatDescription? {
+ guard let pictureParameterSets, let sequenceParameterSets else {
+ return nil
+ }
+ let pictureParameterSetArray = [pictureParameterSets.bytes]
+ let sequenceParameterSetArray = [sequenceParameterSets.bytes]
+ return pictureParameterSetArray[0].withUnsafeBytes { (ppsBuffer: UnsafeRawBufferPointer) -> CMFormatDescription? in
+ guard let ppsBaseAddress = ppsBuffer.baseAddress else {
+ return nil
+ }
+ return sequenceParameterSetArray[0].withUnsafeBytes { (spsBuffer: UnsafeRawBufferPointer) -> CMFormatDescription? in
+ guard let spsBaseAddress = spsBuffer.baseAddress else {
+ return nil
+ }
+ let pointers: [UnsafePointer<UInt8>] = [
+ spsBaseAddress.assumingMemoryBound(to: UInt8.self),
+ ppsBaseAddress.assumingMemoryBound(to: UInt8.self)
+ ]
+ let sizes: [Int] = [spsBuffer.count, ppsBuffer.count]
+ let nalUnitHeaderLength: Int32 = 4
+ var formatDescriptionOut: CMFormatDescription?
+ CMVideoFormatDescriptionCreateFromH264ParameterSets(
+ allocator: kCFAllocatorDefault,
+ parameterSetCount: pointers.count,
+ parameterSetPointers: pointers,
+ parameterSetSizes: sizes,
+ nalUnitHeaderLength: nalUnitHeaderLength,
+ formatDescriptionOut: &formatDescriptionOut
+ )
+ return formatDescriptionOut
+ }
+ }
+ }
+}
+
+extension RTPH264Packetizer: RTPJitterBufferDelegate {
+ // MARK: RTPJitterBufferDelegate
+ func jitterBuffer(_ buffer: RTPJitterBuffer<RTPH264Packetizer<T>>, sequenced: RTPPacket) {
+ decode(sequenced)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPJitterBuffer.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPJitterBuffer.swift
new file mode 100644
index 000000000..aee2d471b
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPJitterBuffer.swift
@@ -0,0 +1,27 @@
+import Foundation
+
+protocol RTPJitterBufferDelegate: AnyObject {
+ func jitterBuffer(_ buffer: RTPJitterBuffer<Self>, sequenced: RTPPacket)
+}
+
+final class RTPJitterBuffer<T: RTPJitterBufferDelegate> {
+ weak var delegate: T?
+
+ private var buffer: [UInt16: RTPPacket] = [:]
+ private var expectedSequence: UInt16 = 0
+ private let stalePacketCounts: Int = 4
+
+ func append(_ packet: RTPPacket) {
+ buffer[packet.sequenceNumber] = packet
+
+ while let packet = buffer[expectedSequence] {
+ delegate?.jitterBuffer(self, sequenced: packet)
+ buffer.removeValue(forKey: expectedSequence)
+ expectedSequence &+= 1
+ }
+
+ if stalePacketCounts <= buffer.count {
+ expectedSequence &+= 1
+ }
+ }
+}
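+
+ // Packets are released strictly in sequence order. A sketch of the behavior
+ // (sequence numbers start at the buffer's initial expectation, 0):
+ //
+ // buffer.append(seq0)   // delivered immediately
+ // buffer.append(seq2)   // held back
+ // buffer.append(seq1)   // delivers seq1, then seq2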
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPOpusPacketizer.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPOpusPacketizer.swift
new file mode 100644
index 000000000..1ec6c4bff
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPOpusPacketizer.swift
@@ -0,0 +1,101 @@
+import AVFAudio
+import CoreMedia
+import Foundation
+import HaishinKit
+
+private let kRTPOpusPacketizer_sampleRate = 48000.0
+
+final class RTPOpusPacketizer<T: RTPPacketizerDelegate>: RTPPacketizer {
+ let ssrc: UInt32
+ let payloadType: UInt8
+ weak var delegate: T?
+ var formatParameter = RTPFormatParameter()
+ private var timestamp: RTPTimestamp = .init(kRTPOpusPacketizer_sampleRate)
+ private var audioFormat: AVAudioFormat?
+ private var sequenceNumber: UInt16 = 0
+ private lazy var jitterBuffer: RTPJitterBuffer<RTPOpusPacketizer<T>> = {
+ let jitterBuffer = RTPJitterBuffer()
+ jitterBuffer.delegate = self
+ return jitterBuffer
+ }()
+
+ init(ssrc: UInt32, payloadType: UInt8) {
+ self.ssrc = ssrc
+ self.payloadType = payloadType
+ }
+
+ func append(_ packet: RTPPacket) {
+ jitterBuffer.append(packet)
+ }
+
+ func append(_ buffer: CMSampleBuffer, lambda: (RTPPacket) -> Void) {
+ }
+
+ func append(_ buffer: AVAudioCompressedBuffer, when: AVAudioTime, lambda: (RTPPacket) -> Void) {
+ lambda(RTPPacket(
+ version: RTPPacket.version,
+ padding: false,
+ extension: false,
+ cc: 0,
+ marker: true,
+ payloadType: payloadType,
+ sequenceNumber: sequenceNumber,
+ timestamp: timestamp.convert(when),
+ ssrc: ssrc,
+ payload: Data(
+ bytes: buffer.data.assumingMemoryBound(to: UInt8.self),
+ count: Int(buffer.byteLength)
+ )
+ ))
+ sequenceNumber &+= 1
+ }
+
+ private func decode(_ packet: RTPPacket) {
+ if audioFormat == nil {
+ if let formatDescription = makeFormatDescription() {
+ audioFormat = .init(cmAudioFormatDescription: formatDescription)
+ }
+ }
+ if let audioFormat {
+ let buffer = AVAudioCompressedBuffer(format: audioFormat, packetCapacity: 1, maximumPacketSize: packet.payload.count)
+ packet.copyBytes(to: buffer)
+ delegate?.packetizer(self, didOutput: buffer, when: timestamp.convert(packet.timestamp))
+ }
+ }
+
+ package func makeFormatDescription() -> CMFormatDescription? {
+ var formatDescription: CMAudioFormatDescription?
+ let framesPerPacket = AVAudioFrameCount(kRTPOpusPacketizer_sampleRate * 0.02)
+ var audioStreamBasicDescription = AudioStreamBasicDescription(
+ mSampleRate: kRTPOpusPacketizer_sampleRate,
+ mFormatID: kAudioFormatOpus,
+ mFormatFlags: 0,
+ mBytesPerPacket: 0,
+ mFramesPerPacket: framesPerPacket,
+ mBytesPerFrame: 0,
+ mChannelsPerFrame: formatParameter.stereo == true ? 2 : 1,
+ mBitsPerChannel: 0,
+ mReserved: 0
+ )
+ guard CMAudioFormatDescriptionCreate(
+ allocator: kCFAllocatorDefault,
+ asbd: &audioStreamBasicDescription,
+ layoutSize: 0,
+ layout: nil,
+ magicCookieSize: 0,
+ magicCookie: nil,
+ extensions: nil,
+ formatDescriptionOut: &formatDescription
+ ) == noErr else {
+ return nil
+ }
+ return formatDescription
+ }
+}
+
+extension RTPOpusPacketizer: RTPJitterBufferDelegate {
+ // MARK: RTPJitterBufferDelegate
+ func jitterBuffer(_ buffer: RTPJitterBuffer<RTPOpusPacketizer<T>>, sequenced: RTPPacket) {
+ decode(sequenced)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPPacket.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPPacket.swift
new file mode 100644
index 000000000..060a3508b
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPPacket.swift
@@ -0,0 +1,90 @@
+import AVFAudio
+import Foundation
+
+/// https://datatracker.ietf.org/doc/html/rfc3550
+struct RTPPacket: Sendable {
+ static let version: UInt8 = 2
+ static let headerSize: Int = 12
+
+ enum Error: Swift.Error {
+ case bufferUnderrun
+ }
+
+ let version: UInt8
+ let padding: Bool
+ let `extension`: Bool
+ let cc: UInt8
+ let marker: Bool
+ let payloadType: UInt8
+ let sequenceNumber: UInt16
+ let timestamp: UInt32
+ let ssrc: UInt32
+ let payload: Data
+
+ func copyBytes(to buffer: AVAudioCompressedBuffer) {
+ let byteLength = UInt32(payload.count)
+ buffer.packetDescriptions?.pointee = AudioStreamPacketDescription(
+ mStartOffset: 0,
+ mVariableFramesInPacket: 0,
+ mDataByteSize: byteLength
+ )
+ buffer.packetCount = 1
+ buffer.byteLength = byteLength
+ payload.withUnsafeBytes { pointer in
+ guard let baseAddress = pointer.baseAddress else {
+ return
+ }
+ buffer.data.copyMemory(from: baseAddress, byteCount: payload.count)
+ }
+ }
+}
+
+extension RTPPacket {
+ var data: Data {
+ var data = Data()
+ var first: UInt8 = (version & 0x03) << 6
+ first |= (padding ? 1 : 0) << 5
+ first |= (`extension` ? 1 : 0) << 4
+ first |= cc & 0x0F
+ data.append(first)
+ var second: UInt8 = (marker ? 1 : 0) << 7
+ second |= payloadType & 0x7F
+ data.append(second)
+ data.append(contentsOf: [
+ UInt8(sequenceNumber >> 8),
+ UInt8(sequenceNumber & 0xFF)
+ ])
+ data.append(contentsOf: [
+ UInt8(timestamp >> 24),
+ UInt8((timestamp >> 16) & 0xFF),
+ UInt8((timestamp >> 8) & 0xFF),
+ UInt8(timestamp & 0xFF)
+ ])
+ data.append(contentsOf: [
+ UInt8(ssrc >> 24),
+ UInt8((ssrc >> 16) & 0xFF),
+ UInt8((ssrc >> 8) & 0xFF),
+ UInt8(ssrc & 0xFF)
+ ])
+ data.append(payload)
+ return data
+ }
+
+ init(_ data: Data) throws {
+ guard RTPPacket.headerSize < data.count else {
+ throw Error.bufferUnderrun
+ }
+ let first = data[0]
+ version = (first & 0b11000000) >> 6
+ padding = (first & 0b00100000) >> 5 == 1
+ `extension` = (first & 0b00010000) >> 4 == 1
+ cc = (first & 0b00001111)
+ let second = data[1]
+ marker = (second & 0b10000000) >> 7 == 1
+ payloadType = (second & 0b01111111)
+ sequenceNumber = UInt16(data[2]) << 8 | UInt16(data[3])
+ timestamp = UInt32(data: data[4...7]).bigEndian
+ ssrc = UInt32(data: data[8...11]).bigEndian
+ payload = Data(data[12...])
+ }
+}
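+
+ // Round-trip sketch: the 12-byte header is serialized big-endian, so a packet
+ // rebuilt from its own `data` compares field-for-field (`opusFrame` stands in
+ // for any payload bytes):
+ //
+ // let packet = RTPPacket(version: 2, padding: false, extension: false, cc: 0,
+ //                        marker: true, payloadType: 111, sequenceNumber: 1,
+ //                        timestamp: 960, ssrc: 0x1234, payload: opusFrame)
+ // let parsed = try RTPPacket(packet.data)   // parsed.timestamp == 960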
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPPacketizer.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPPacketizer.swift
new file mode 100644
index 000000000..fdb331db3
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPPacketizer.swift
@@ -0,0 +1,21 @@
+import AVFAudio
+import CoreMedia
+
+protocol RTPPacketizerDelegate: AnyObject {
+ func packetizer(_ packetizer: some RTPPacketizer, didOutput buffer: CMSampleBuffer)
+ func packetizer(_ packetizer: some RTPPacketizer, didOutput buffer: AVAudioCompressedBuffer, when: AVAudioTime)
+}
+
+protocol RTPPacketizer {
+ associatedtype T: RTPPacketizerDelegate
+
+ var delegate: T? { get set }
+ var ssrc: UInt32 { get }
+ var formatParameter: RTPFormatParameter { get set }
+
+ func append(_ packet: RTPPacket)
+
+ func append(_ buffer: CMSampleBuffer, lambda: (RTPPacket) -> Void)
+
+ func append(_ buffer: AVAudioCompressedBuffer, when: AVAudioTime, lambda: (RTPPacket) -> Void)
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPTimestamp.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPTimestamp.swift
new file mode 100644
index 000000000..94cc6b6a3
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPTimestamp.swift
@@ -0,0 +1,49 @@
+import AVFAudio
+import CoreMedia
+import Foundation
+
+struct RTPTimestamp {
+ static let startedAt: Double = -1
+
+ private let rate: Double
+ private var startedAt = Self.startedAt
+
+ init(_ rate: Double) {
+ self.rate = rate
+ }
+
+ func convert(_ timestamp: UInt32) -> AVAudioTime {
+ return AVAudioTime(hostTime: AVAudioTime.hostTime(forSeconds: Double(timestamp) / rate))
+ }
+
+ func convert(_ timestamp: UInt32) -> CMTime {
+ return CMTime(value: CMTimeValue(timestamp), timescale: CMTimeScale(rate))
+ }
+
+ mutating func convert(_ when: AVAudioTime) -> UInt32 {
+ let seconds: Double
+ if when.hostTime != 0 {
+ seconds = AVAudioTime.seconds(forHostTime: when.hostTime)
+ } else {
+ seconds = Double(when.sampleTime) / when.sampleRate
+ }
+ if startedAt == Self.startedAt {
+ startedAt = seconds
+ }
+ let timestamp = UInt64((seconds - startedAt) * rate)
+ return UInt32(timestamp & 0xFFFFFFFF)
+ }
+
+ mutating func convert(_ time: CMTime) -> UInt32 {
+ let seconds = time.seconds
+ if startedAt == Self.startedAt {
+ startedAt = seconds
+ }
+ let timestamp = UInt64((seconds - startedAt) * rate)
+ return UInt32(timestamp & 0xFFFFFFFF)
+ }
+
+ mutating func reset() {
+ startedAt = Self.startedAt
+ }
+}
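+
+ // Worked example at the 90 kHz video clock: the first conversion anchors the
+ // epoch, so a frame 1/30 s later maps to 90000 / 30 = 3000 ticks:
+ //
+ // var ts = RTPTimestamp(90000)
+ // _ = ts.convert(CMTime(value: 0, timescale: 30))   // 0
+ // _ = ts.convert(CMTime(value: 1, timescale: 30))   // 3000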
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/SDP/SDPMediaDescription.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/SDP/SDPMediaDescription.swift
new file mode 100644
index 000000000..4cfdd6059
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/SDP/SDPMediaDescription.swift
@@ -0,0 +1,97 @@
+import Foundation
+
+struct SDPMediaDescription {
+ enum Error: Swift.Error {
+ case invalidArguments(_ sdp: String)
+ }
+
+ static let m = "m="
+ static let mid = "a=mid:"
+ static let fmtp = "a=fmtp:"
+ static let rtpmap = "a=rtpmap:"
+ static let rtcpFb = "a=rtcp-fb:"
+
+ enum Attribute {
+ case rtpmap(payload: UInt8, codec: String, clock: Int, channels: Int?)
+ case fmtp(payload: UInt8, params: String)
+ case rtcpFb(payload: UInt8, type: String)
+ case mid(String)
+ case direction(String)
+ case rtcpMux
+ case other(key: String, value: String?)
+ }
+
+ let kind: String
+ let payload: UInt8
+ let attributes: [Attribute]
+}
+
+extension SDPMediaDescription {
+ init(sdp: String) throws {
+ var kind: String?
+ var payload: UInt8?
+ var attributes: [Attribute] = []
+ let lines = sdp.replacingOccurrences(of: "\r\n", with: "\n").split(separator: "\n")
+ for line in lines {
+ switch true {
+ case line.hasPrefix(Self.m):
+ // m=audio 9 UDP/TLS/RTP/SAVPF 111
+ let components = line.dropFirst(Self.m.count).split(separator: " ")
+ guard 4 <= components.count else {
+ break
+ }
+ kind = String(components[0])
+ if let _payload = UInt8(components[3]) {
+ payload = _payload
+ }
+ case line.hasPrefix(Self.mid):
+ // a=mid:0
+ attributes.append(.mid(String(line.dropFirst(Self.mid.count))))
+ case line.hasPrefix(Self.rtpmap):
+ // a=rtpmap:111 opus/48000/2
+ let components = line.dropFirst(Self.rtpmap.count).split(separator: " ")
+ guard 2 <= components.count else {
+ break
+ }
+ let codec = components[1].split(separator: "/")
+ guard 2 <= codec.count else {
+ break
+ }
+ if let payload = UInt8(components[0]), let clock = Int(codec[1]) {
+ attributes.append(.rtpmap(
+ payload: payload,
+ codec: String(codec[0]),
+ clock: clock,
+ channels: 2 < codec.count ? Int(codec[2]) : nil
+ ))
+ }
+ case line.hasPrefix(Self.rtcpFb):
+ // a=rtcp-fb:96 nack
+ let components = line.dropFirst(Self.rtcpFb.count).split(separator: " ")
+ guard 2 <= components.count else {
+ break
+ }
+ if let payload = UInt8(components[0]) {
+ attributes.append(.rtcpFb(payload: payload, type: String(components[1])))
+ }
+ case line.hasPrefix(Self.fmtp):
+ // a=fmtp:111 minptime=10;useinbandfec=1
+ let components = line.dropFirst(Self.fmtp.count).split(separator: " ")
+ guard 2 <= components.count else {
+ break
+ }
+ if let payload = UInt8(components[0]) {
+ attributes.append(.fmtp(payload: payload, params: String(components[1])))
+ }
+ default:
+ break
+ }
+ }
+ guard let kind, let payload else {
+ throw Error.invalidArguments(sdp)
+ }
+ self.kind = kind
+ self.payload = payload
+ self.attributes = attributes
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/SDP/SDPSessionDescriptionType.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/SDP/SDPSessionDescriptionType.swift
new file mode 100644
index 000000000..8f94412ed
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/SDP/SDPSessionDescriptionType.swift
@@ -0,0 +1,8 @@
+import libdatachannel
+
+public enum SDPSessionDescriptionType: String, Sendable {
+ case answer
+ case offer
+ case pranswer
+ case rollback
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Util/CUtil.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Util/CUtil.swift
new file mode 100644
index 000000000..d195879c5
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Util/CUtil.swift
@@ -0,0 +1,21 @@
+import Foundation
+
+enum CUtil {
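+ // libdatachannel-style getters are called twice: a nil buffer makes the call return the
+ // required size (negative values are errors), then a second call fills the buffer.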
+ static func getString(
+ _ lambda: (UnsafeMutablePointer<CChar>?, Int32) -> Int32
+ ) throws -> String {
+ let size = try RTCError.check(lambda(nil, 0))
+ var buffer = [CChar](repeating: 0, count: Int(size))
+ _ = lambda(&buffer, Int32(size))
+ return String(cString: &buffer)
+ }
+
+ static func getUInt32(
+ _ lambda: (UnsafeMutablePointer<UInt32>?, Int32) -> Int32
+ ) throws -> UInt32 {
+ let size = try RTCError.check(lambda(nil, 0))
+ var buffer: UInt32 = 0
+ _ = lambda(&buffer, Int32(size))
+ return buffer
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Util/Constants.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Util/Constants.swift
new file mode 100644
index 000000000..618655c75
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Util/Constants.swift
@@ -0,0 +1,6 @@
+import Logboard
+
+/// The identifier for the HaishinKit WebRTC integration.
+public let kRTCHaishinKitIdentifier = "com.haishinkit.RTCHaishinKit"
+
+nonisolated(unsafe) let logger = LBLogger.with(kRTCHaishinKitIdentifier)
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/RTP/RTPFormatParameterTests.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/RTP/RTPFormatParameterTests.swift
new file mode 100644
index 000000000..5bee437be
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/RTP/RTPFormatParameterTests.swift
@@ -0,0 +1,13 @@
+import AVFoundation
+import Foundation
+import Testing
+
+@testable import RTCHaishinKit
+
+@Suite struct RTPFormatParameterTests {
+ @Test func opus() throws {
+ let parameter = RTPFormatParameter("minptime=10;useinbandfec=1;stereo=1")
+ #expect(parameter.stereo == true)
+ #expect(parameter.minptime == 10)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/RTP/RTPJitterBufferTests.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/RTP/RTPJitterBufferTests.swift
new file mode 100644
index 000000000..8d7d12568
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/RTP/RTPJitterBufferTests.swift
@@ -0,0 +1,32 @@
+import AVFoundation
+import Foundation
+import Testing
+
+@testable import RTCHaishinKit
+
+@Suite struct RTPJitterBufferTests {
+ final class Result: RTPJitterBufferDelegate {
+ var count = 0
+
+ func jitterBuffer(_ buffer: RTPJitterBuffer, sequenced: RTPPacket) {
+ count += 1
+ }
+ }
+
+ @Test func lostPacket() throws {
+ let result = Result()
+ let buffer = RTPJitterBuffer()
+ buffer.delegate = result
+ var packets: [RTPPacket] = []
+ for i in 0...100 {
+ packets.append(.init(
+ version: 2,
+ padding: false,
+ extension: false,
+ cc: 0,
+ marker: false,
+ payloadType: 0,
+ sequenceNumber: UInt16(i),
+ timestamp: UInt32(960 * (i + 1)),
+ ssrc: 0,
+ payload: Data()
+ ))
+ }
+ packets.remove(at: 30)
+ packets.remove(at: 50)
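+ // Index 30 removes sequence number 30; after the shift, index 50 removes sequence
+ // number 51, so 99 of the 101 packets should reach the delegate.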
+ for packet in packets {
+ buffer.append(packet)
+ }
+
+ #expect(result.count == 99)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/RTP/RTPPacketTests.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/RTP/RTPPacketTests.swift
new file mode 100644
index 000000000..954ebc390
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/RTP/RTPPacketTests.swift
@@ -0,0 +1,30 @@
+import AVFoundation
+import Foundation
+import Testing
+
+@testable import RTCHaishinKit
+
+@Suite struct RTPPacketTests {
+ @Test func packet1() throws {
+ let data = Data([128, 226, 2, 7, 0, 1, 201, 8, 14, 44, 247, 214, 28, 76, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 128])
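+ // Header bytes: 0x80 → version 2; 0xE2 → marker bit set, payload type 98; 0x0207 → sequence 519.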
+ let packet = try RTPPacket(data)
+ #expect(packet.version == 2)
+ #expect(packet.padding == false)
+ #expect(packet.marker == true)
+ #expect(packet.payloadType == 98)
+ #expect(packet.sequenceNumber == 519)
+ #expect(packet.timestamp == 117000)
+ #expect(packet.ssrc == 237828054)
+ #expect(packet.data == data)
+ }
+
+ @Test func packet2() throws {
+ let data = Data([128, 111, 0, 173, 0, 2, 136, 192, 41, 147, 97, 224, 252, 23, 218, 183, 83, 181, 164, 207, 10, 78, 74, 42, 42, 249, 40, 72, 142, 88, 51, 132, 23, 107, 145, 143, 6, 242, 109, 235, 187, 177, 55, 195, 232, 243, 46, 157, 1, 21, 214, 170, 16, 197, 227, 115, 186, 183, 132, 229, 107, 63, 238, 227, 166, 27, 77, 128, 120, 49, 249, 174, 241, 250, 236, 250, 154, 140, 253, 68, 152, 242, 187, 1, 196, 52, 198, 130, 62, 235, 20, 196, 1, 223, 126, 158, 142, 138, 35, 215, 22, 252, 235, 69, 166, 241, 237, 13, 155, 16, 6, 61, 26, 138, 90, 207, 213, 22, 33, 198, 209, 110, 198, 118, 174, 232, 21, 6, 206, 237, 190, 47, 214, 61, 161, 168, 192, 17, 248, 87, 21, 172, 79, 90, 183, 66, 221, 232, 206, 153, 205, 57, 195, 106, 119, 12, 130, 190, 105, 234, 116, 78, 72, 190, 85, 189, 149, 73, 150, 139, 147, 230, 71, 149, 39, 87, 207, 245, 247, 226, 176, 246, 14, 220, 3, 158, 81, 129, 96, 13, 52, 126, 49, 139, 179, 176, 108, 0, 220, 77, 40, 5, 201, 219, 218, 86, 76, 241, 204, 152, 209, 215, 241, 18, 247, 151, 206, 20, 110, 188, 245, 89, 25, 254, 206, 87, 76, 210, 51, 55, 117, 127, 177, 149, 13, 23, 226, 214, 24, 122, 205, 225, 42, 66, 172, 10, 16, 129, 222, 50, 253, 125, 178, 46, 221, 91, 181, 171, 83, 85, 164, 240, 245, 80, 240])
+ let packet = try RTPPacket(data)
+ #expect(packet.version == 2)
+ #expect(packet.padding == false)
+ #expect(packet.payloadType == 111)
+ #expect(packet.sequenceNumber == 173)
+ #expect(packet.data == data)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/RTP/RTPTimestampTests.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/RTP/RTPTimestampTests.swift
new file mode 100644
index 000000000..4d8c037aa
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/RTP/RTPTimestampTests.swift
@@ -0,0 +1,14 @@
+import AVFoundation
+import Foundation
+import Testing
+
+@testable import RTCHaishinKit
+
+@Suite struct RTPTimestampTests {
+ @Test func convertRTPPacketTimestamp_H264() throws {
+ var timestamp = RTPTimestamp(90000.0)
+ #expect(timestamp.convert(CMTime(value: 511364443358833, timescale: 1000000000)) == 0)
+ #expect(timestamp.convert(CMTime(value: 511364476594833, timescale: 1000000000)) == 2991)
+ #expect(timestamp.convert(CMTime(value: 511364509930833, timescale: 1000000000)) == 5991)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/SDP/SDPMediaDescriptionTests.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/SDP/SDPMediaDescriptionTests.swift
new file mode 100644
index 000000000..a5e8ce69c
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/SDP/SDPMediaDescriptionTests.swift
@@ -0,0 +1,65 @@
+import AVFoundation
+import Foundation
+import Testing
+
+@testable import RTCHaishinKit
+
+@Suite struct SDPMediaDescriptionTests {
+ @Test func opus() throws {
+ let sdp = """
+ m=audio 9 UDP/TLS/RTP/SAVPF 111
+ c=IN IP4 0.0.0.0
+ a=rtpmap:111 opus/48000/2
+ a=fmtp:111 minptime=10;useinbandfec=1
+ a=rtcp-mux
+ a=rtcp-rsize
+ a=sendrecv
+ a=mid:0
+ """
+
+ let mediaDescription = try SDPMediaDescription(sdp: sdp)
+ #expect(mediaDescription.kind == "audio")
+ #expect(mediaDescription.payload == 111)
+ for attribute in mediaDescription.attributes {
+ switch attribute {
+ case .rtpmap(let payload, let codec, let clock, let channels):
+ #expect(payload == 111)
+ #expect(codec == "opus")
+ #expect(clock == 48000)
+ #expect(channels == 2)
+ case .mid(let mid):
+ #expect(mid == "0")
+ default:
+ break
+ }
+ }
+ let rtpmap = mediaDescription.attributes.compactMap { attr -> (UInt8, String, Int, Int?)? in
+ if case let .rtpmap(payload, codec, clock, channel) = attr { return (payload, codec, clock, channel) }
+ return nil
+ }
+ #expect(rtpmap[0].0 == 111)
+ #expect(rtpmap[0].1 == "opus")
+ #expect(rtpmap[0].2 == 48000)
+ #expect(rtpmap[0].3 == 2)
+ }
+
+ @Test func vp8() throws {
+ let sdp = """
+ m=video 9 UDP/TLS/RTP/SAVPF 96
+ c=IN IP4 0.0.0.0
+ a=rtpmap:96 VP8/90000
+ a=rtcp-fb:96 ccm fir
+ a=rtcp-fb:96 nack
+ a=rtcp-fb:96 nack pli
+ a=rtcp-fb:96 goog-remb
+ a=rtcp-fb:96 transport-cc
+ a=rtcp-mux
+ a=rtcp-rsize
+ a=sendrecv
+ a=mid:1
+ """
+ let mediaDescription = try SDPMediaDescription(sdp: sdp)
+ #expect(mediaDescription.kind == "video")
+ #expect(mediaDescription.payload == 96)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/AMF/AMF0Serializer.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/AMF/AMF0Serializer.swift
new file mode 100644
index 000000000..f9a4c5053
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/AMF/AMF0Serializer.swift
@@ -0,0 +1,399 @@
+import Foundation
+import HaishinKit
+
+enum AMFSerializerError: Error {
+ case deserialize
+ case outOfIndex
+}
+
+// MARK: -
+protocol AMFSerializer: ByteArrayConvertible {
+ var reference: AMFReference { get set }
+
+ @discardableResult
+ func serialize(_ value: Bool) -> Self
+ func deserialize() throws -> Bool
+
+ @discardableResult
+ func serialize(_ value: String) -> Self
+ func deserialize() throws -> String
+
+ @discardableResult
+ func serialize(_ value: Int) -> Self
+ func deserialize() throws -> Int
+
+ @discardableResult
+ func serialize(_ value: Double) -> Self
+ func deserialize() throws -> Double
+
+ @discardableResult
+ func serialize(_ value: Date) -> Self
+ func deserialize() throws -> Date
+
+ @discardableResult
+ func serialize(_ value: [(any Sendable)?]) -> Self
+ func deserialize() throws -> [(any Sendable)?]
+
+ @discardableResult
+ func serialize(_ value: AMFArray) -> Self
+ func deserialize() throws -> AMFArray
+
+ @discardableResult
+ func serialize(_ value: AMFObject) -> Self
+ func deserialize() throws -> AMFObject
+
+ @discardableResult
+ func serialize(_ value: AMFXMLDocument) -> Self
+ func deserialize() throws -> AMFXMLDocument
+
+ @discardableResult
+ func serialize(_ value: (any Sendable)?) -> Self
+ func deserialize() throws -> (any Sendable)?
+}
+
+enum AMF0Type: UInt8 {
+ case number = 0x00
+ case bool = 0x01
+ case string = 0x02
+ case object = 0x03
+ // case MovieClip = 0x04
+ case null = 0x05
+ case undefined = 0x06
+ case reference = 0x07
+ case ecmaArray = 0x08
+ case objectEnd = 0x09
+ case strictArray = 0x0a
+ case date = 0x0b
+ case longString = 0x0c
+ case unsupported = 0x0d
+ // case RecordSet = 0x0e
+ case xmlDocument = 0x0f
+ case typedObject = 0x10
+ case avmplus = 0x11
+}
+
+// MARK: - AMF0Serializer
+final class AMF0Serializer: ByteArray {
+ var reference = AMFReference()
+}
+
+extension AMF0Serializer: AMFSerializer {
+ // MARK: AMFSerializer
+ @discardableResult
+ func serialize(_ value: (any Sendable)?) -> Self {
+ if value == nil {
+ return writeUInt8(AMF0Type.null.rawValue)
+ }
+ switch value {
+ case let value as Int:
+ return serialize(Double(value))
+ case let value as UInt:
+ return serialize(Double(value))
+ case let value as Int8:
+ return serialize(Double(value))
+ case let value as UInt8:
+ return serialize(Double(value))
+ case let value as Int16:
+ return serialize(Double(value))
+ case let value as UInt16:
+ return serialize(Double(value))
+ case let value as Int32:
+ return serialize(Double(value))
+ case let value as UInt32:
+ return serialize(Double(value))
+ case let value as Float:
+ return serialize(Double(value))
+ case let value as CGFloat:
+ return serialize(Double(value))
+ case let value as Double:
+ return serialize(Double(value))
+ case let value as Date:
+ return serialize(value)
+ case let value as String:
+ return serialize(value)
+ case let value as Bool:
+ return serialize(value)
+ case let value as [(any Sendable)?]:
+ return serialize(value)
+ case let value as AMFArray:
+ return serialize(value)
+ case let value as AMFObject:
+ return serialize(value)
+ default:
+ return writeUInt8(AMF0Type.undefined.rawValue)
+ }
+ }
+
+ func deserialize() throws -> (any Sendable)? {
+ guard let type = AMF0Type(rawValue: try readUInt8()) else {
+ return nil
+ }
+ position -= 1
+ switch type {
+ case .number:
+ return try deserialize() as Double
+ case .bool:
+ return try deserialize() as Bool
+ case .string:
+ return try deserialize() as String
+ case .object:
+ return try deserialize() as AMFObject
+ case .null:
+ position += 1
+ return nil
+ case .undefined:
+ position += 1
+ return kAMFUndefined
+ case .reference:
+ assertionFailure("TODO")
+ return nil
+ case .ecmaArray:
+ return try deserialize() as AMFArray
+ case .objectEnd:
+ assertionFailure()
+ return nil
+ case .strictArray:
+ return try deserialize() as [(any Sendable)?]
+ case .date:
+ return try deserialize() as Date
+ case .longString:
+ return try deserialize() as String
+ case .unsupported:
+ assertionFailure("Unsupported")
+ return nil
+ case .xmlDocument:
+ return try deserialize() as AMFXMLDocument
+ case .typedObject:
+ return nil
+ case .avmplus:
+ assertionFailure("TODO")
+ return nil
+ }
+ }
+
+ /**
+ * - seealso: 2.2 Number Type
+ */
+ func serialize(_ value: Double) -> Self {
+ writeUInt8(AMF0Type.number.rawValue).writeDouble(value)
+ }
+
+ func deserialize() throws -> Double {
+ guard try readUInt8() == AMF0Type.number.rawValue else {
+ throw AMFSerializerError.deserialize
+ }
+ return try readDouble()
+ }
+
+ func serialize(_ value: Int) -> Self {
+ serialize(Double(value))
+ }
+
+ func deserialize() throws -> Int {
+ Int(try deserialize() as Double)
+ }
+
+ /**
+ * - seealso: 2.3 Boolean Type
+ */
+ func serialize(_ value: Bool) -> Self {
+ writeBytes(Data([AMF0Type.bool.rawValue, value ? 0x01 : 0x00]))
+ }
+
+ func deserialize() throws -> Bool {
+ guard try readUInt8() == AMF0Type.bool.rawValue else {
+ throw AMFSerializerError.deserialize
+ }
+ return try readUInt8() == 0x01 ? true : false
+ }
+
+ /**
+ * - seealso: 2.4 String Type
+ */
+ func serialize(_ value: String) -> Self {
+ let isLong = UInt32(UInt16.max) < UInt32(value.count)
+ writeUInt8(isLong ? AMF0Type.longString.rawValue : AMF0Type.string.rawValue)
+ return serializeUTF8(value, isLong)
+ }
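+
+ // For example, "foo" serializes as 02 00 03 66 6F 6F: a string marker byte, a big-endian
+ // UInt16 length, then the UTF-8 bytes.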
+
+ func deserialize() throws -> String {
+ switch try readUInt8() {
+ case AMF0Type.string.rawValue:
+ return try deserializeUTF8(false)
+ case AMF0Type.longString.rawValue:
+ return try deserializeUTF8(true)
+ default:
+ assertionFailure()
+ return ""
+ }
+ }
+
+ /**
+ * 2.5 Object Type
+ * typealias ECMAObject = [String: Any?]
+ */
+ func serialize(_ value: AMFObject) -> Self {
+ writeUInt8(AMF0Type.object.rawValue)
+ for (key, data) in value {
+ serializeUTF8(key, false).serialize(data)
+ }
+ return serializeUTF8("", false).writeUInt8(AMF0Type.objectEnd.rawValue)
+ }
+
+ func deserialize() throws -> AMFObject {
+ var result = AMFObject()
+
+ switch try readUInt8() {
+ case AMF0Type.null.rawValue:
+ return result
+ case AMF0Type.object.rawValue:
+ break
+ default:
+ throw AMFSerializerError.deserialize
+ }
+
+ while true {
+ let key: String = try deserializeUTF8(false)
+ guard !key.isEmpty else {
+ position += 1
+ break
+ }
+ result[key] = try deserialize()
+ }
+
+ return result
+ }
+
+ /**
+ * - seealso: 2.10 ECMA Array Type
+ */
+ func serialize(_ value: AMFArray) -> Self {
+ writeUInt8(AMF0Type.ecmaArray.rawValue)
+ writeUInt32(UInt32(value.data.count))
+ value.data.enumerated().forEach { index, value in
+ serializeUTF8(index.description, false).serialize(value)
+ }
+ value.dict.forEach { key, value in
+ serializeUTF8(key, false).serialize(value)
+ }
+ serializeUTF8("", false)
+ writeUInt8(AMF0Type.objectEnd.rawValue)
+ return self
+ }
+
+ func deserialize() throws -> AMFArray {
+ switch try readUInt8() {
+ case AMF0Type.null.rawValue:
+ return AMFArray()
+ case AMF0Type.ecmaArray.rawValue:
+ break
+ default:
+ throw AMFSerializerError.deserialize
+ }
+
+ var result = AMFArray(count: Int(try readUInt32()))
+ while true {
+ let key = try deserializeUTF8(false)
+ guard !key.isEmpty else {
+ position += 1
+ break
+ }
+ result[key] = try deserialize()
+ }
+
+ return result
+ }
+
+ /**
+ * - seealso: 2.12 Strict Array Type
+ */
+ func serialize(_ value: [(any Sendable)?]) -> Self {
+ writeUInt8(AMF0Type.strictArray.rawValue)
+ if value.isEmpty {
+ writeBytes(Data([0x00, 0x00, 0x00, 0x00]))
+ return self
+ }
+ writeUInt32(UInt32(value.count))
+ for v in value {
+ serialize(v)
+ }
+ return self
+ }
+
+ func deserialize() throws -> [(any Sendable)?] {
+ guard try readUInt8() == AMF0Type.strictArray.rawValue else {
+ throw AMFSerializerError.deserialize
+ }
+ var result: [(any Sendable)?] = []
+ let count = Int(try readUInt32())
+ for _ in 0..<count {
+ result.append(try deserialize())
+ }
+ return result
+ }
+
+ /**
+ * - seealso: 2.13 Date Type
+ */
+ func serialize(_ value: Date) -> Self {
+ writeUInt8(AMF0Type.date.rawValue).writeDouble(value.timeIntervalSince1970 * 1000).writeBytes(Data([0x00, 0x00]))
+ }
+
+ func deserialize() throws -> Date {
+ guard try readUInt8() == AMF0Type.date.rawValue else {
+ throw AMFSerializerError.deserialize
+ }
+ let date = Date(timeIntervalSince1970: try readDouble() / 1000)
+ position += 2 // timezone offset
+ return date
+ }
+
+ /**
+ * - seealso: 2.17 XML Document Type
+ */
+ func serialize(_ value: AMFXMLDocument) -> Self {
+ writeUInt8(AMF0Type.xmlDocument.rawValue).serializeUTF8(value.description, true)
+ }
+
+ func deserialize() throws -> AMFXMLDocument {
+ guard try readUInt8() == AMF0Type.xmlDocument.rawValue else {
+ throw AMFSerializerError.deserialize
+ }
+ return AMFXMLDocument(data: try deserializeUTF8(true))
+ }
+
+ func deserialize() throws -> AMFTypedObject {
+ guard try readUInt8() == AMF0Type.typedObject.rawValue else {
+ throw AMFSerializerError.deserialize
+ }
+
+ let typeName = try deserializeUTF8(false)
+ var result = AMFObject()
+ while true {
+ let key = try deserializeUTF8(false)
+ guard !key.isEmpty else {
+ position += 1
+ break
+ }
+ result[key] = try deserialize()
+ }
+
+ return AMFTypedObject(typeName: typeName, data: result)
+ }
+
+ @discardableResult
+ private func serializeUTF8(_ value: String, _ isLong: Bool) -> Self {
+ let utf8 = Data(value.utf8)
+ if isLong {
+ writeUInt32(UInt32(utf8.count))
+ } else {
+ writeUInt16(UInt16(utf8.count))
+ }
+ return writeBytes(utf8)
+ }
+
+ private func deserializeUTF8(_ isLong: Bool) throws -> String {
+ let length: Int = isLong ? Int(try readUInt32()) : Int(try readUInt16())
+ return try readUTF8Bytes(length)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/AMF/AMF3Serializer.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/AMF/AMF3Serializer.swift
new file mode 100644
index 000000000..788f29eb4
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/AMF/AMF3Serializer.swift
@@ -0,0 +1,574 @@
+import Foundation
+
+final class AMFReference {
+ var strings: [String] = []
+ var objects: [Any] = []
+
+ func getString(_ index: Int) throws -> String {
+ if strings.count <= index {
+ throw AMFSerializerError.outOfIndex
+ }
+ return strings[index]
+ }
+
+ func getObject(_ index: Int) throws -> Any {
+ if objects.count <= index {
+ throw AMFSerializerError.outOfIndex
+ }
+ return objects[index]
+ }
+
+ func indexOf<T: Equatable>(_ value: T) -> Int? {
+ for (index, data) in objects.enumerated() {
+ if let data: T = data as? T, data == value {
+ return index
+ }
+ }
+ return nil
+ }
+
+ func indexOf(_ value: [Int32]) -> Int? {
+ nil
+ }
+
+ func indexOf(_ value: [UInt32]) -> Int? {
+ nil
+ }
+
+ func indexOf(_ value: [Double]) -> Int? {
+ nil
+ }
+
+ func indexOf(_ value: [Any?]) -> Int? {
+ nil
+ }
+
+ func indexOf(_ value: AMFObject) -> Int? {
+ for (index, data) in objects.enumerated() {
+ if let data: AMFObject = data as? AMFObject, data.description == value.description {
+ return index
+ }
+ }
+ return nil
+ }
+
+ func indexOf(_ value: String) -> Int? {
+ strings.firstIndex(of: value)
+ }
+}
+
+enum AMF3Type: UInt8 {
+ case undefined = 0x00
+ case null = 0x01
+ case boolFalse = 0x02
+ case boolTrue = 0x03
+ case integer = 0x04
+ case number = 0x05
+ case string = 0x06
+ case xml = 0x07
+ case date = 0x08
+ case array = 0x09
+ case object = 0x0A
+ case xmlString = 0x0B
+ case byteArray = 0x0C
+ case vectorInt = 0x0D
+ case vectorUInt = 0x0E
+ case vectorNumber = 0x0F
+ case vectorObject = 0x10
+ case dictionary = 0x11
+}
+
+// MARK: -
+/**
+ AMF3 Serializer
+
+ - seealso: http://wwwimages.adobe.com/www.adobe.com/content/dam/Adobe/en/devnet/amf/pdf/amf-file-format-spec.pdf
+ */
+final class AMF3Serializer: ByteArray {
+ var reference = AMFReference()
+}
+
+extension AMF3Serializer: AMFSerializer {
+ // MARK: AMFSerializer
+ @discardableResult
+ func serialize(_ value: (any Sendable)?) -> Self {
+ if value == nil {
+ return writeUInt8(AMF3Type.null.rawValue)
+ }
+ switch value {
+ case let value as Int:
+ return serialize(Double(value))
+ case let value as UInt:
+ return serialize(Double(value))
+ case let value as Int8:
+ return serialize(Double(value))
+ case let value as UInt8:
+ return serialize(Double(value))
+ case let value as Int16:
+ return serialize(Double(value))
+ case let value as UInt16:
+ return serialize(Double(value))
+ case let value as Int32:
+ return serialize(Double(value))
+ case let value as UInt32:
+ return serialize(Double(value))
+ case let value as Float:
+ return serialize(Double(value))
+ case let value as CGFloat:
+ return serialize(Double(value))
+ case let value as Double:
+ return serialize(Double(value))
+ case let value as Date:
+ return serialize(value)
+ case let value as String:
+ return serialize(value)
+ case let value as Bool:
+ return serialize(value)
+ case let value as AMFArray:
+ return serialize(value)
+ case let value as AMFObject:
+ return serialize(value)
+ default:
+ return writeUInt8(AMF3Type.undefined.rawValue)
+ }
+ }
+
+ func deserialize() throws -> (any Sendable)? {
+ guard let type = AMF3Type(rawValue: try readUInt8()) else {
+ throw AMFSerializerError.deserialize
+ }
+ position -= 1
+ switch type {
+ case .undefined:
+ position += 1
+ return kAMFUndefined
+ case .null:
+ position += 1
+ return nil
+ case .boolFalse:
+ return try deserialize() as Bool
+ case .boolTrue:
+ return try deserialize() as Bool
+ case .integer:
+ return try deserialize() as Int
+ case .number:
+ return try deserialize() as Double
+ case .string:
+ return try deserialize() as String
+ case .xml:
+ return try deserialize() as AMFXMLDocument
+ case .date:
+ return try deserialize() as Date
+ case .array:
+ return try deserialize() as AMFArray
+ case .object:
+ return try deserialize() as AMFObject
+ case .xmlString:
+ return try deserialize() as AMFXML
+ case .byteArray:
+ return try deserialize() as Data
+ case .vectorInt:
+ return try deserialize() as [Int32]
+ case .vectorUInt:
+ return try deserialize() as [UInt32]
+ case .vectorNumber:
+ return try deserialize() as [Double]
+ case .vectorObject:
+ return try deserialize() as [(any Sendable)?]
+ case .dictionary:
+ assertionFailure("Unsupported")
+ return nil
+ }
+ }
+
+ /**
+ - seealso: 3.4 false Type
+ - seealso: 3.5 true type
+ */
+ @discardableResult
+ func serialize(_ value: Bool) -> Self {
+ writeUInt8(value ? AMF3Type.boolTrue.rawValue : AMF3Type.boolFalse.rawValue)
+ }
+
+ func deserialize() throws -> Bool {
+ switch try readUInt8() {
+ case AMF3Type.boolTrue.rawValue:
+ return true
+ case AMF3Type.boolFalse.rawValue:
+ return false
+ default:
+ throw AMFSerializerError.deserialize
+ }
+ }
+
+ /**
+ - seealso: 3.6 integer type
+ */
+ @discardableResult
+ func serialize(_ value: Int) -> Self {
+ writeUInt8(AMF3Type.integer.rawValue).serializeU29(value)
+ }
+
+ func deserialize() throws -> Int {
+ guard try readUInt8() == AMF3Type.integer.rawValue else {
+ throw AMFSerializerError.deserialize
+ }
+ return try deserializeU29()
+ }
+
+ /**
+ - seealso: 3.7 double type
+ */
+ @discardableResult
+ func serialize(_ value: Double) -> Self {
+ writeUInt8(AMF3Type.number.rawValue).writeDouble(value)
+ }
+
+ func deserialize() throws -> Double {
+ guard try readUInt8() == AMF3Type.number.rawValue else {
+ throw AMFSerializerError.deserialize
+ }
+ return try readDouble()
+ }
+
+ /**
+ - seealso: 3.8 String type
+ */
+ @discardableResult
+ func serialize(_ value: String) -> Self {
+ writeUInt8(AMF3Type.string.rawValue).serializeUTF8(value)
+ }
+
+ func deserialize() throws -> String {
+ guard try readUInt8() == AMF3Type.string.rawValue else {
+ throw AMFSerializerError.deserialize
+ }
+ return try deserializeUTF8()
+ }
+
+ /**
+ - seealso: 3.9 XML type
+ */
+ @discardableResult
+ func serialize(_ value: AMFXMLDocument) -> Self {
+ writeUInt8(AMF3Type.xml.rawValue)
+ if let index: Int = reference.indexOf(value) {
+ return serializeU29(index << 1)
+ }
+ reference.objects.append(value)
+ let utf8 = Data(value.description.utf8)
+ return serialize(utf8.count << 1 | 0x01).writeBytes(utf8)
+ }
+
+ func deserialize() throws -> AMFXMLDocument {
+ guard try readUInt8() == AMF3Type.xml.rawValue else {
+ throw AMFSerializerError.deserialize
+ }
+ let refs: Int = try deserializeU29()
+ if (refs & 0x01) == 0 {
+ guard let document: AMFXMLDocument = try reference.getObject(refs >> 1) as? AMFXMLDocument else {
+ throw AMFSerializerError.deserialize
+ }
+ return document
+ }
+ let document = AMFXMLDocument(data: try readUTF8Bytes(refs >> 1))
+ reference.objects.append(document)
+ return document
+ }
+
+ /**
+ - seealso: 3.10 Date type
+ */
+ @discardableResult
+ func serialize(_ value: Date) -> Self {
+ writeUInt8(AMF3Type.date.rawValue)
+ if let index: Int = reference.indexOf(value) {
+ return serializeU29(index << 1)
+ }
+ reference.objects.append(value)
+ return serializeU29(0x01).writeDouble(value.timeIntervalSince1970 * 1000)
+ }
+
+ func deserialize() throws -> Date {
+ guard try readUInt8() == AMF3Type.date.rawValue else {
+ throw AMFSerializerError.deserialize
+ }
+ let refs: Int = try deserializeU29()
+ if (refs & 0x01) == 0 {
+ guard let date: Date = try reference.getObject(refs >> 1) as? Date else {
+ throw AMFSerializerError.deserialize
+ }
+ return date
+ }
+ let date = Date(timeIntervalSince1970: try readDouble() / 1000)
+ reference.objects.append(date)
+ return date
+ }
+
+ /**
+ - seealso: 3.11 Array type
+ */
+ @discardableResult
+ func serialize(_ value: AMFArray) -> Self {
+ writeUInt8(AMF3Type.array.rawValue)
+ if let index: Int = reference.indexOf(value) {
+ return serializeU29(index << 1)
+ }
+ reference.objects.append(value)
+ serialize(value.length << 1 | 0x01)
+ for (key, value) in value.dict {
+ serialize(key).serialize(value)
+ }
+ serialize("")
+ for value in value.data {
+ serialize(value)
+ }
+ return self
+ }
+
+ func deserialize() throws -> AMFArray {
+ guard try readUInt8() == AMF3Type.array.rawValue else {
+ throw AMFSerializerError.deserialize
+ }
+ return AMFArray()
+ }
+
+ /**
+ - seealso: 3.12 Object type
+ - note: ASObject = Dictionary
+ */
+ @discardableResult
+ func serialize(_ value: AMFObject) -> Self {
+ writeUInt8(AMF3Type.object.rawValue)
+ if let index: Int = reference.indexOf(value) {
+ return serializeU29(index << 1)
+ }
+ reference.objects.append(value)
+ for (key, value) in value {
+ serialize(key).serialize(value)
+ }
+ return serialize("")
+ }
+
+ func deserialize() throws -> AMFObject {
+ guard try readUInt8() == AMF3Type.object.rawValue else {
+ throw AMFSerializerError.deserialize
+ }
+ return AMFObject()
+ }
+
+ /**
+ - seealso: 3.13 XML type
+ */
+ @discardableResult
+ func serialize(_ value: AMFXML) -> Self {
+ writeUInt8(AMF3Type.xmlString.rawValue)
+ if let index: Int = reference.indexOf(value) {
+ return serializeU29(index << 1)
+ }
+ reference.objects.append(value)
+ let utf8 = Data(value.description.utf8)
+ return serialize(utf8.count << 1 | 0x01).writeBytes(utf8)
+ }
+
+ func deserialize() throws -> AMFXML {
+ guard try readUInt8() == AMF3Type.xmlString.rawValue else {
+ throw AMFSerializerError.deserialize
+ }
+ let refs: Int = try deserializeU29()
+ if (refs & 0x01) == 0 {
+ guard let xml: AMFXML = try reference.getObject(refs >> 1) as? AMFXML else {
+ throw AMFSerializerError.deserialize
+ }
+ return xml
+ }
+ let xml = AMFXML(data: try readUTF8Bytes(refs >> 1))
+ reference.objects.append(xml)
+ return xml
+ }
+
+ /**
+ - seealso: 3.14 ByteArray type
+ - note: flash.utils.ByteArray = lf.ByteArray
+ */
+ @discardableResult
+ func serialize(_ value: Data) -> Self {
+ self
+ }
+
+ func deserialize() throws -> Data {
+ Data()
+ }
+
+ /**
+ - seealso: 3.15 Vector Type, vector-int-type
+ */
+ @discardableResult
+ func serialize(_ value: [Int32]) -> Self {
+ writeUInt8(AMF3Type.vectorInt.rawValue)
+ if let index: Int = reference.indexOf(value) {
+ return serializeU29(index << 1)
+ }
+ reference.objects.append(value)
+ serializeU29(value.count << 1 | 0x01).writeUInt8(0x00)
+ for v in value {
+ writeInt32(v)
+ }
+ return self
+ }
+
+ func deserialize() throws -> [Int32] {
+ guard try readUInt8() == AMF3Type.vectorInt.rawValue else {
+ throw AMFSerializerError.deserialize
+ }
+ return []
+ }
+
+ /**
+ - seealso: 3.15 Vector Type, vector-uint-type
+ */
+ @discardableResult
+ func serialize(_ value: [UInt32]) -> Self {
+ writeUInt8(AMF3Type.vectorUInt.rawValue)
+ if let index: Int = reference.indexOf(value) {
+ return serializeU29(index << 1)
+ }
+ reference.objects.append(value)
+ serializeU29(value.count << 1 | 0x01).writeUInt8(0x00)
+ for v in value {
+ writeUInt32(v)
+ }
+ return self
+ }
+
+ func deserialize() throws -> [UInt32] {
+ guard try readUInt8() == AMF3Type.vectorUInt.rawValue else {
+ throw AMFSerializerError.deserialize
+ }
+ return []
+ }
+
+ /**
+ - seealso: 3.15 Vector Type, vector-number-type
+ */
+ @discardableResult
+ func serialize(_ value: [Double]) -> Self {
+ writeUInt8(AMF3Type.vectorNumber.rawValue)
+ if let index: Int = reference.indexOf(value) {
+ return serializeU29(index << 1)
+ }
+ reference.objects.append(value)
+ serializeU29(value.count << 1 | 0x01).writeUInt8(0x00)
+ for v in value {
+ writeDouble(v)
+ }
+ return self
+ }
+
+ func deserialize() throws -> [Double] {
+ guard try readUInt8() == AMF3Type.vectorNumber.rawValue else {
+ throw AMFSerializerError.deserialize
+ }
+ return []
+ }
+
+ /**
+ - seealso: 3.15 Vector Type, vector-object-type
+ */
+ @discardableResult
+ func serialize(_ value: [(any Sendable)?]) -> Self {
+ writeUInt8(AMF3Type.vectorObject.rawValue)
+ if let index: Int = reference.indexOf(value) {
+ return serializeU29(index << 1)
+ }
+ reference.objects.append(value)
+ serializeU29(value.count << 1 | 0x01).serializeUTF8("*")
+ for v in value {
+ serialize(v)
+ }
+ return self
+ }
+
+ func deserialize() throws -> [(any Sendable)?] {
+ guard try readUInt8() == AMF3Type.vectorObject.rawValue else {
+ throw AMFSerializerError.deserialize
+ }
+ return []
+ }
+
+ /**
+ - seealso: 1.3.1 Variable Length Unsigned 29-bit Integer Encoding
+ */
+ @discardableResult
+ private func serializeU29(_ value: Int) -> Self {
+ if value < Int(Int32.min) || Int(Int32.max) < value {
+ return serialize(Double(value))
+ }
+ let value = UInt32(value)
+ switch UInt32(0) {
+ case value & 0xFFFFFF80:
+ return writeUInt8(UInt8(value & 0x7f))
+ case value & 0xFFFFC000:
+ return writeUInt8(UInt8(value >> 7 | 0x80))
+ .writeUInt8(UInt8(value & 0x7F))
+ case value & 0xFFE00000:
+ return writeUInt8(UInt8(value >> 14 & 0x7F | 0x80))
+ .writeUInt8(UInt8(value >> 7 & 0x7F | 0x80))
+ .writeUInt8(UInt8(value & 0x7F))
+ default:
+ return writeUInt8(UInt8(value >> 22 & 0x7F | 0x80))
+ .writeUInt8(UInt8(value >> 15 & 0x7F | 0x80))
+ .writeUInt8(UInt8(value >> 8 & 0x7F | 0x80))
+ .writeUInt8(UInt8(value & 0xFF))
+ }
+ }
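+
+ // U29 examples: 0x7F → 7F, 0x80 → 81 00, 0x4000 → 81 80 00. Each leading byte carries
+ // 7 payload bits with the high bit marking continuation; a 4th byte carries 8 bits.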
+
+ private func deserializeU29() throws -> Int {
+ var count = 1
+ var result = 0
+ var byte: UInt8 = try readUInt8()
+
+ while byte & 0x80 != 0 && count < 4 {
+ result <<= 7
+ result |= Int(byte & 0x7F)
+ byte = try readUInt8()
+ count += 1
+ }
+
+ if count < 4 {
+ result <<= 7
+ result |= Int(byte)
+ } else {
+ result <<= 8
+ result |= Int(byte)
+ }
+
+ return result
+ }
+
+ /**
+ - seealso: 1.3.2 Strings and UTF-8
+ */
+ @discardableResult
+ private func serializeUTF8(_ value: String) -> Self {
+ if value.isEmpty {
+ return serializeU29(0x01)
+ }
+ if let index: Int = reference.indexOf(value) {
+ return serializeU29(index << 1)
+ }
+ let utf8 = Data(value.utf8)
+ reference.strings.append(value)
+ return serializeU29(utf8.count << 1 | 0x01).writeBytes(utf8)
+ }
+
+ private func deserializeUTF8() throws -> String {
+ let ref: Int = try deserializeU29()
+ if (ref & 0x01) == 0 {
+ return try reference.getString(ref >> 1)
+ }
+ let length = ref >> 1
+ let string: String = try readUTF8Bytes(length)
+ reference.strings.append(string)
+ return string
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/AMF/AMFFoundation.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/AMF/AMFFoundation.swift
new file mode 100644
index 000000000..19d89032d
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/AMF/AMFFoundation.swift
@@ -0,0 +1,151 @@
+import Foundation
+
+/// The singleton AMFUndefined object.
+public let kAMFUndefined = AMFUndefined()
+
+/// The AMFObject typealias represents an object for ActionScript.
+public typealias AMFObject = [String: (any Sendable)?]
+
+/// The AMFUndefined structure represents an undefined value for ActionScript.
+public struct AMFUndefined: Sendable, CustomStringConvertible {
+ public var description: String {
+ "undefined"
+ }
+}
+
+/// The AMFTypedObject structure represents a typed object for ActionScript.
+public struct AMFTypedObject: Sendable {
+ /// The type name.
+ public let typeName: String
+ /// The data of object contents.
+ public let data: AMFObject
+}
+
+// MARK: -
+/// The AMFArray structure represents an array value for ActionScript.
+public struct AMFArray: Sendable {
+ private(set) var data: [(any Sendable)?]
+ private(set) var dict: [String: (any Sendable)?] = [:]
+
+ /// The length of an array.
+ public var length: Int {
+ data.count
+ }
+
+ /// Creates a new instance containing the specified number of repeated undefined values.
+ public init(count: Int) {
+ self.data = [(any Sendable)?](repeating: kAMFUndefined, count: count)
+ }
+
+ /// Creates a new instance of data.
+ public init(data: [(any Sendable)?]) {
+ self.data = data
+ }
+
+ init(_ dict: AMFObject) {
+ self.dict = dict
+ self.data = .init()
+ }
+}
+
+extension AMFArray: ExpressibleByArrayLiteral {
+ // MARK: ExpressibleByArrayLiteral
+ public init(arrayLiteral elements: (any Sendable)?...) {
+ self = AMFArray(data: elements)
+ }
+
+ /// Accesses the element at the specified position.
+ public subscript(i: Any) -> (any Sendable)? {
+ get {
+ if let i: Int = i as? Int {
+ return i < data.count ? data[i] : kAMFUndefined
+ }
+ if let i: String = i as? String {
+ if let i = Int(i) {
+ return i < data.count ? data[i] : kAMFUndefined
+ }
+ return dict[i] as (any Sendable)
+ }
+ return nil
+ }
+ set {
+ if let i = i as? Int {
+ if data.count <= i {
+ data += [(any Sendable)?](repeating: kAMFUndefined, count: i - data.count + 1)
+ }
+ data[i] = newValue
+ }
+ if let i = i as? String {
+ if let i = Int(i) {
+ if data.count <= i {
+ data += [(any Sendable)?](repeating: kAMFUndefined, count: i - data.count + 1)
+ }
+ data[i] = newValue
+ return
+ }
+ dict[i] = newValue
+ }
+ }
+ }
+}
+
+extension AMFArray: CustomDebugStringConvertible {
+ // MARK: CustomDebugStringConvertible
+ public var debugDescription: String {
+ data.debugDescription + ":" + dict.debugDescription
+ }
+}
+
+extension AMFArray: Equatable {
+ // MARK: Equatable
+ public static func == (lhs: AMFArray, rhs: AMFArray) -> Bool {
+ (lhs.data.description == rhs.data.description) && (lhs.dict.description == rhs.dict.description)
+ }
+}
+
+// MARK: -
+/// ActionScript 1.0 and 2.0 and flash.xml.XMLDocument in ActionScript 3.0
+/// - seealso: 2.17 XML Document Type (amf0-file-format-specification.pdf)
+/// - seealso: 3.9 XMLDocument type (amf-file-format-spec.pdf)
+public struct AMFXMLDocument: Sendable, CustomStringConvertible {
+ public var description: String {
+ data
+ }
+
+ private let data: String
+
+ /// Creates a new instance of string.
+ public init(data: String) {
+ self.data = data
+ }
+}
+
+extension AMFXMLDocument: Equatable {
+ // MARK: Equatable
+ public static func == (lhs: AMFXMLDocument, rhs: AMFXMLDocument) -> Bool {
+ (lhs.description == rhs.description)
+ }
+}
+
+// MARK: -
+/// ActionScript 3.0 introduces a new XML type.
+/// - seealso: 3.13 XML type (amf-file-format-spec.pdf)
+public struct AMFXML: Sendable, CustomStringConvertible {
+ public var description: String {
+ data
+ }
+
+ private let data: String
+
+ /// Creates a new instance of string.
+ public init(data: String) {
+ self.data = data
+ }
+}
+
+extension AMFXML: Equatable {
+ // MARK: Equatable
+ public static func == (lhs: AMFXML, rhs: AMFXML) -> Bool {
+ (lhs.description == rhs.description)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Codec/AVCDecoderConfigurationRecord.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Codec/AVCDecoderConfigurationRecord.swift
new file mode 100644
index 000000000..7b375c5a3
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Codec/AVCDecoderConfigurationRecord.swift
@@ -0,0 +1,130 @@
+import AVFoundation
+import HaishinKit
+import VideoToolbox
+
+protocol DecoderConfigurationRecord {
+ func makeFormatDescription() -> CMFormatDescription?
+}
+
+// MARK: -
+/*
+ - seealso: ISO/IEC 14496-15 2010
+ */
+struct AVCDecoderConfigurationRecord: DecoderConfigurationRecord {
+ static let reserveLengthSizeMinusOne: UInt8 = 0x3F
+ static let reserveNumOfSequenceParameterSets: UInt8 = 0xE0
+ static let reserveChromaFormat: UInt8 = 0xFC
+ static let reserveBitDepthLumaMinus8: UInt8 = 0xF8
+ static let reserveBitDepthChromaMinus8: UInt8 = 0xF8
+
+ var configurationVersion: UInt8 = 1
+ var avcProfileIndication: UInt8 = 0
+ var profileCompatibility: UInt8 = 0
+ var avcLevelIndication: UInt8 = 0
+ var lengthSizeMinusOneWithReserved: UInt8 = 0
+ var numOfSequenceParameterSetsWithReserved: UInt8 = 0
+ var sequenceParameterSets: [[UInt8]] = []
+ var pictureParameterSets: [[UInt8]] = []
+
+ var chromaFormatWithReserve: UInt8 = 0
+ var bitDepthLumaMinus8WithReserve: UInt8 = 0
+ var bitDepthChromaMinus8WithReserve: UInt8 = 0
+ var sequenceParameterSetExt: [[UInt8]] = []
+
+ var naluLength: Int32 {
+ Int32((lengthSizeMinusOneWithReserved >> 6) + 1)
+ }
+
+ init() {
+ }
+
+ init(data: Data) {
+ self.data = data
+ }
+
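+ // Builds a format description from the first SPS and PPS entries, using AVCC framing
+ // with 4-byte NAL unit lengths as VideoToolbox expects.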
+ func makeFormatDescription() -> CMFormatDescription? {
+ return pictureParameterSets[0].withUnsafeBytes { (ppsBuffer: UnsafeRawBufferPointer) -> CMFormatDescription? in
+ guard let ppsBaseAddress = ppsBuffer.baseAddress else {
+ return nil
+ }
+ return sequenceParameterSets[0].withUnsafeBytes { (spsBuffer: UnsafeRawBufferPointer) -> CMFormatDescription? in
+ guard let spsBaseAddress = spsBuffer.baseAddress else {
+ return nil
+ }
+ let pointers: [UnsafePointer<UInt8>] = [
+ spsBaseAddress.assumingMemoryBound(to: UInt8.self),
+ ppsBaseAddress.assumingMemoryBound(to: UInt8.self)
+ ]
+ let sizes: [Int] = [spsBuffer.count, ppsBuffer.count]
+ let nalUnitHeaderLength: Int32 = 4
+ var formatDescriptionOut: CMFormatDescription?
+ CMVideoFormatDescriptionCreateFromH264ParameterSets(
+ allocator: kCFAllocatorDefault,
+ parameterSetCount: pointers.count,
+ parameterSetPointers: pointers,
+ parameterSetSizes: sizes,
+ nalUnitHeaderLength: nalUnitHeaderLength,
+ formatDescriptionOut: &formatDescriptionOut
+ )
+ return formatDescriptionOut
+ }
+ }
+ }
+}
+
+extension AVCDecoderConfigurationRecord: DataConvertible {
+ // MARK: DataConvertible
+ var data: Data {
+ get {
+ let buffer = ByteArray()
+ .writeUInt8(configurationVersion)
+ .writeUInt8(avcProfileIndication)
+ .writeUInt8(profileCompatibility)
+ .writeUInt8(avcLevelIndication)
+ .writeUInt8(lengthSizeMinusOneWithReserved)
+ .writeUInt8(numOfSequenceParameterSetsWithReserved)
+ for i in 0..<sequenceParameterSets.count {
+ buffer
+ .writeUInt16(UInt16(sequenceParameterSets[i].count))
+ .writeBytes(Data(sequenceParameterSets[i]))
+ }
+ buffer.writeUInt8(UInt8(pictureParameterSets.count))
+ for i in 0..<pictureParameterSets.count {
+ buffer
+ .writeUInt16(UInt16(pictureParameterSets[i].count))
+ .writeBytes(Data(pictureParameterSets[i]))
+ }
+ return buffer.data
+ }
+ set {
+ let buffer = ByteArray(data: newValue)
+ do {
+ configurationVersion = try buffer.readUInt8()
+ avcProfileIndication = try buffer.readUInt8()
+ profileCompatibility = try buffer.readUInt8()
+ avcLevelIndication = try buffer.readUInt8()
+ lengthSizeMinusOneWithReserved = try buffer.readUInt8()
+ numOfSequenceParameterSetsWithReserved = try buffer.readUInt8()
+ let numOfSequenceParameterSets = numOfSequenceParameterSetsWithReserved & ~Self.reserveNumOfSequenceParameterSets
+ for _ in 0..<numOfSequenceParameterSets {
+ let length = Int(try buffer.readUInt16())
+ sequenceParameterSets.append([UInt8](try buffer.readBytes(length)))
+ }
+ let numOfPictureParameterSets = try buffer.readUInt8()
+ for _ in 0..<numOfPictureParameterSets {
+ let length = Int(try buffer.readUInt16())
+ pictureParameterSets.append([UInt8](try buffer.readBytes(length)))
+ }
+ } catch {
+ logger.error("\(buffer)")
+ }
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Codec/HEVCDecoderConfigurationRecord.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Codec/HEVCDecoderConfigurationRecord.swift
new file mode 100644
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Codec/HEVCDecoderConfigurationRecord.swift
+import AVFoundation
+import HaishinKit
+import VideoToolbox
+
+/*
+ - seealso: ISO/IEC 14496-15 2010
+ */
+struct HEVCDecoderConfigurationRecord: DecoderConfigurationRecord {
+ var configurationVersion: UInt8 = 1
+ var generalProfileSpace: UInt8 = 0
+ var generalTierFlag = false
+ var generalProfileIdc: UInt8 = 0
+ var generalProfileCompatibilityFlags: UInt32 = 0
+ var generalConstraintIndicatorFlags: UInt64 = 0
+ var generalLevelIdc: UInt8 = 0
+ var minSpatialSegmentationIdc: UInt16 = 0
+ var parallelismType: UInt8 = 0
+ var chromaFormat: UInt8 = 0
+ var bitDepthLumaMinus8: UInt8 = 0
+ var bitDepthChromaMinus8: UInt8 = 0
+ var avgFrameRate: UInt16 = 0
+ var constantFrameRate: UInt8 = 0
+ var numTemporalLayers: UInt8 = 0
+ var temporalIdNested: UInt8 = 0
+ var lengthSizeMinusOne: UInt8 = 0
+ var numberOfArrays: UInt8 = 0
+ var array: [HEVCNALUnitType: [Data]] = [:]
+
+ init() {
+ }
+
+ init(data: Data) {
+ self.data = data
+ }
+
+ func makeFormatDescription() -> CMFormatDescription? {
+ guard let vps = array[.vps], let sps = array[.sps], let pps = array[.pps] else {
+ return nil
+ }
+ return vps[0].withUnsafeBytes { (vpsBuffer: UnsafeRawBufferPointer) -> CMFormatDescription? in
+ guard let vpsBaseAddress = vpsBuffer.baseAddress else {
+ return nil
+ }
+ return sps[0].withUnsafeBytes { (spsBuffer: UnsafeRawBufferPointer) -> CMFormatDescription? in
+ guard let spsBaseAddress = spsBuffer.baseAddress else {
+ return nil
+ }
+ return pps[0].withUnsafeBytes { (ppsBuffer: UnsafeRawBufferPointer) -> CMFormatDescription? in
+ guard let ppsBaseAddress = ppsBuffer.baseAddress else {
+ return nil
+ }
+ var formatDescriptionOut: CMFormatDescription?
+ let pointers: [UnsafePointer<UInt8>] = [
+ vpsBaseAddress.assumingMemoryBound(to: UInt8.self),
+ spsBaseAddress.assumingMemoryBound(to: UInt8.self),
+ ppsBaseAddress.assumingMemoryBound(to: UInt8.self)
+ ]
+ let sizes: [Int] = [vpsBuffer.count, spsBuffer.count, ppsBuffer.count]
+ let nalUnitHeaderLength: Int32 = 4
+ CMVideoFormatDescriptionCreateFromHEVCParameterSets(
+ allocator: kCFAllocatorDefault,
+ parameterSetCount: pointers.count,
+ parameterSetPointers: pointers,
+ parameterSetSizes: sizes,
+ nalUnitHeaderLength: nalUnitHeaderLength,
+ extensions: nil,
+ formatDescriptionOut: &formatDescriptionOut
+ )
+ return formatDescriptionOut
+ }
+ }
+ }
+ }
+}
+
+extension HEVCDecoderConfigurationRecord: DataConvertible {
+ // MARK: DataConvertible
+ var data: Data {
+ get {
+ let buffer = ByteArray()
+ .writeUInt8(configurationVersion)
+ return buffer.data
+ }
+ set {
+ let buffer = ByteArray(data: newValue)
+ do {
+ configurationVersion = try buffer.readUInt8()
+ let a = try buffer.readUInt8()
+ generalProfileSpace = a >> 6
+ generalTierFlag = a & 0x20 > 0
+ generalProfileIdc = a & 0x1F
+ generalProfileCompatibilityFlags = try buffer.readUInt32()
+ generalConstraintIndicatorFlags = UInt64(try buffer.readUInt32()) << 16 | UInt64(try buffer.readUInt16())
+ generalLevelIdc = try buffer.readUInt8()
+ minSpatialSegmentationIdc = try buffer.readUInt16() & 0xFFF
+ parallelismType = try buffer.readUInt8() & 0x3
+ chromaFormat = try buffer.readUInt8() & 0x3
+ bitDepthLumaMinus8 = try buffer.readUInt8() & 0x7
+ bitDepthChromaMinus8 = try buffer.readUInt8() & 0x7
+ avgFrameRate = try buffer.readUInt16()
+ let b = try buffer.readUInt8()
+ constantFrameRate = b >> 6
+ numTemporalLayers = (b & 0x38) >> 3
+ temporalIdNested = (b & 0x4) >> 2
+ lengthSizeMinusOne = b & 0x3
+ numberOfArrays = try buffer.readUInt8()
+ for _ in 0..<numberOfArrays {
+ let byte = try buffer.readUInt8()
+ let nalUnitType = HEVCNALUnitType(rawValue: byte & 0b00111111)
+ var nalUnits: [Data] = []
+ let numNalus = try buffer.readUInt16()
+ for _ in 0..<numNalus {
+ let length = Int(try buffer.readUInt16())
+ nalUnits.append(try buffer.readBytes(length))
+ }
+ if let nalUnitType {
+ array[nalUnitType] = nalUnits
+ }
+ }
+ } catch {
+ logger.error("\(buffer)")
+ }
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Extension/CMFormatDescription+Extension.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Extension/CMFormatDescription+Extension.swift
new file mode 100644
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Extension/CMFormatDescription+Extension.swift
+import CoreMedia
+import Foundation
+
+extension CMFormatDescription {
+ var configurationBox: Data? {
+ guard let atoms = CMFormatDescriptionGetExtension(
+ self,
+ extensionKey: kCMFormatDescriptionExtension_SampleDescriptionExtensionAtoms
+ ) as? NSDictionary else {
+ return nil
+ }
+ switch mediaSubType {
+ case .h264:
+ return atoms["avcC"] as? Data
+ case .hevc:
+ return atoms["hvcC"] as? Data
+ default:
+ return nil
+ }
+ }
+
+ func makeDecoderConfigurationRecord() -> (any DecoderConfigurationRecord)? {
+ guard let configurationBox else {
+ return nil
+ }
+ switch mediaSubType {
+ case .h264:
+ return AVCDecoderConfigurationRecord(data: configurationBox)
+ case .hevc:
+ return HEVCDecoderConfigurationRecord(data: configurationBox)
+ default:
+ return nil
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Extension/IncomingStream+Extension.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Extension/IncomingStream+Extension.swift
new file mode 100644
index 000000000..0ca1db14a
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Extension/IncomingStream+Extension.swift
@@ -0,0 +1,11 @@
+import CoreMedia
+import HaishinKit
+
+extension IncomingStream {
+ func append(_ message: RTMPVideoMessage, presentationTimeStamp: CMTime, formatDesciption: CMFormatDescription?) {
+ guard let buffer = message.makeSampleBuffer(presentationTimeStamp, formatDesciption: formatDesciption) else {
+ return
+ }
+ append(buffer)
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Extension/URL+Extension.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Extension/URL+Extension.swift
new file mode 100644
index 000000000..a7487429b
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Extension/URL+Extension.swift
@@ -0,0 +1,32 @@
+import Foundation
+
+extension URL {
+ var absoluteWithoutAuthenticationString: String {
+ guard var components = URLComponents(string: absoluteString) else {
+ return absoluteString
+ }
+ components.password = nil
+ components.user = nil
+ return components.url?.absoluteString ?? absoluteString
+ }
+
+ var absoluteWithoutQueryString: String {
+ guard let query: String = self.query else {
+ return self.absoluteString
+ }
+ return absoluteString.replacingOccurrences(of: "?" + query, with: "")
+ }
+
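+ // For a URL like rtmp://host/app?user=a&salt=b this yields ["user": "a", "salt": "b"].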
+ func dictionaryFromQuery() -> [String: String] {
+ var result: [String: String] = [:]
+ guard let query = URLComponents(string: absoluteString)?.queryItems else {
+ return result
+ }
+ for item in query {
+ if let value: String = item.value {
+ result[item.name] = value
+ }
+ }
+ return result
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPAuthenticator.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPAuthenticator.swift
new file mode 100644
index 000000000..7b3cd107f
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPAuthenticator.swift
@@ -0,0 +1,56 @@
+import Foundation
+
+final class RTMPAuthenticator {
+ enum Error: Swift.Error {
+ case noCredential
+ case failedToAuth(description: String)
+ }
+
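+ // Adobe's "San Jose" authmod: the server's needauth description supplies salt, opaque
+ // and/or challenge values; the reply appends a fresh client challenge and a base64 MD5
+ // digest derived from user + salt + password combined with those values.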
+ private static func makeSanJoseAuthCommand(_ url: URL, description: String) -> String {
+ var command: String = url.absoluteString
+
+ guard let index = description.firstIndex(of: "?") else {
+ return command
+ }
+
+ let query = String(description[description.index(index, offsetBy: 1)...])
+ let challenge = String(format: "%08x", UInt32.random(in: 0...UInt32.max))
+ let dictionary = URL(string: "http://localhost?" + query)!.dictionaryFromQuery()
+
+ var response = MD5.base64("\(url.user!)\(dictionary["salt"]!)\(url.password!)")
+ if let opaque = dictionary["opaque"] {
+ command += "&opaque=\(opaque)"
+ response += opaque
+ } else if let challenge: String = dictionary["challenge"] {
+ response += challenge
+ }
+
+ response = MD5.base64("\(response)\(challenge)")
+ command += "&challenge=\(challenge)&response=\(response)"
+
+ return command
+ }
+
+ func makeCommand(_ command: String, status: RTMPStatus) -> Result<String, Error> {
+ switch true {
+ case status.description.contains("reason=needauth"):
+ guard
+ let uri = URL(string: command),
+ uri.user != nil, uri.password != nil else {
+ return .failure(Error.noCredential)
+ }
+ let command = Self.makeSanJoseAuthCommand(uri, description: status.description)
+ return .success(command)
+ case status.description.contains("authmod=adobe"):
+ guard
+ let uri = URL(string: command),
+ let user = uri.user, uri.password != nil else {
+ return .failure(Error.noCredential)
+ }
+ let query = uri.query ?? ""
+ let command = uri.absoluteString + (query.isEmpty ? "?" : "&") + "authmod=adobe&user=\(user)"
+ return .success(command)
+ default:
+ return .failure(Error.failedToAuth(description: status.description))
+ }
+ }
+}
diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPChunk.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPChunk.swift
new file mode 100644
index 000000000..f68fcff07
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPChunk.swift
@@ -0,0 +1,318 @@
+import Foundation
+
+private let kRTMPExtendTimestampSize = 4
+
+enum RTMPChunkError: Swift.Error {
+ case bufferUnderflow
+ case unknownChunkType(value: UInt8)
+}
+
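+/// RTMP chunk header types: type 0 carries a full message header (timestamp, length,
+/// type id and stream id), type 1 omits the stream id, type 2 carries only a timestamp
+/// delta, and type 3 reuses the previous header entirely.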
+enum RTMPChunkType: UInt8 {
+ case zero = 0
+ case one = 1
+ case two = 2
+ case three = 3
+
+ var headerSize: Int {
+ switch self {
+ case .zero:
+ return 11
+ case .one:
+ return 7
+ case .two:
+ return 3
+ case .three:
+ return 0
+ }
+ }
+}
+
+enum RTMPChunkStreamId: UInt16 {
+ case control = 0x02
+ case command = 0x03
+ case audio = 0x04
+ case video = 0x05
+ case data = 0x08
+}
+
+final class RTMPChunkMessageHeader {
+ static let chunkSize = 128
+ static let maxTimestamp: UInt32 = 0xFFFFFF
+
+ var timestamp: UInt32 = 0
+ var messageLength: Int = 0 {
+ didSet {
+ guard payload.count != messageLength else {
+ return
+ }
+ payload = Data(count: messageLength)
+ position = 0
+ }
+ }
+ var messageTypeId: UInt8 = 0
+ var messageStreamId: UInt32 = 0
+ private(set) var payload = Data()
+ private var position = 0
+
+ init() {
+ }
+
+ init(timestamp: UInt32, messageLength: Int, messageTypeId: UInt8, messageStreamId: UInt32) {
+ self.timestamp = timestamp
+ self.messageLength = messageLength
+ self.messageTypeId = messageTypeId
+ self.messageStreamId = messageStreamId
+ self.payload = Data(count: messageLength)
+ }
+
+ func put(_ buffer: RTMPChunkBuffer, chunkSize: Int) throws {
+ let length = min(chunkSize, messageLength - position)
+ if buffer.remaining < length {
+ throw RTMPChunkError.bufferUnderflow
+ }
+ self.payload.replaceSubrange(position..<position + length, with: buffer.get(length))
+ position += length
+ }
+
+ func makeMessage() -> (any RTMPMessage)? {
+ if position < payload.count {
+ return nil
+ }
+ switch messageTypeId {
+ case 0x01:
+ return RTMPSetChunkSizeMessage(self)
+ case 0x02:
+ return RTMPAbortMessge(self)
+ case 0x03:
+ return RTMPAcknowledgementMessage(self)
+ case 0x04:
+ return RTMPUserControlMessage(self)
+ case 0x05:
+ return RTMPWindowAcknowledgementSizeMessage(self)
+ case 0x06:
+ return RTMPSetPeerBandwidthMessage(self)
+ case 0x08:
+ return RTMPAudioMessage(self)
+ case 0x09:
+ return RTMPVideoMessage(self)
+ case 0x0F:
+ return RTMPDataMessage(self, objectEncoding: .amf3)
+ case 0x10:
+ return RTMPSharedObjectMessage(self, objectEncoding: .amf3)
+ case 0x11:
+ return RTMPCommandMessage(self, objectEncoding: .amf3)
+ case 0x12:
+ return RTMPDataMessage(self, objectEncoding: .amf0)
+ case 0x13:
+ return RTMPSharedObjectMessage(self, objectEncoding: .amf0)
+ case 0x14:
+ return RTMPCommandMessage(self, objectEncoding: .amf0)
+ case 0x16:
+ return RTMPAggregateMessage(self)
+ default:
+ return nil
+ }
+ }
+}
+
+final class RTMPChunkBuffer {
+ static let headerSize = 3 + 11 + 4
+
+ var payload: Data {
+ data[position..<length]
+ }
+
+ var remaining: Int {
+ length - position
+ }
+
+ var chunkSize = RTMPChunkMessageHeader.chunkSize
+ private var data = Data(count: RTMPChunkBuffer.headerSize + RTMPChunkMessageHeader.chunkSize)
+ private var position = 0
+ private var length = 0
+
+ @discardableResult
+ func flip() -> Self {
+ length = position
+ position = 0
+ return self
+ }
+
+ func get(_ length: Int) -> Data {
+ defer {
+ position += length
+ }
+ return data[position..<position + length]
+ }
+
+ func getBasicHeader() throws -> (RTMPChunkType, UInt16) {
+ let rawValue = (data[position] & 0b11000000) >> 6
+ guard let type = RTMPChunkType(rawValue: rawValue) else {
+ throw RTMPChunkError.unknownChunkType(value: rawValue)
+ }
+ switch data[position] & 0b00111111 {
+ case 0:
+ defer {
+ position += 2
+ }
+ return (type, UInt16(data[position + 1]) + 64)
+ case 1:
+ defer {
+ position += 3
+ }
+ return (type, UInt16(data: data[position + 1...position + 2]) + 64)
+ default:
+ defer {
+ position += 1
+ }
+ return (type, UInt16(data[position] & 0b00111111))
+ }
+ }
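+
+ // Examples: a first byte of 0x03 yields (.zero, chunk stream 3); 6-bit values of 0 and 1
+ // select the 2- and 3-byte forms, where the chunk stream id is the trailing byte(s) plus 64.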
+
+ func getMessageHeader(_ type: RTMPChunkType, messageHeader: RTMPChunkMessageHeader) throws {
+ if remaining < type.headerSize {
+ throw RTMPChunkError.bufferUnderflow
+ }
+ switch type {
+ case .zero:
+ messageHeader.timestamp = UInt32(data: data[position..<position + 3])
+ messageHeader.messageLength = Int(UInt32(data: data[position + 3..<position + 6]))
+ messageHeader.messageTypeId = data[position + 6]
+ messageHeader.messageStreamId = UInt32(data: data[position + 7..<position + 11])
+ position += type.headerSize
+ case .one:
+ messageHeader.timestamp = UInt32(data: data[position..<position + 3])
+ messageHeader.messageLength = Int(UInt32(data: data[position + 3..<position + 6]))
+ messageHeader.messageTypeId = data[position + 6]
+ position += type.headerSize
+ case .two:
+ messageHeader.timestamp = UInt32(data: data[position..<position + 3])
+ position += type.headerSize
+ case .three:
+ break
+ }
+ if messageHeader.timestamp == RTMPChunkMessageHeader.maxTimestamp {
+ messageHeader.timestamp = UInt32(data: data[position..<position + kRTMPExtendTimestampSize])
+ position += kRTMPExtendTimestampSize
+ }
+ }
+
+ func put(_ message: any RTMPMessage, chunkType: RTMPChunkType, chunkStreamId: UInt16) -> AnyIterator<Data> {
+ let payload = message.payload
+ let length = payload.count
+ var offset = 0
+ var remaining = min(chunkSize, length)
+ return AnyIterator { () -> Data? in
+ guard 0 < remaining else {
+ return nil
+ }
+ defer {
+ self.position = 0
+ offset += remaining
+ remaining = min(self.chunkSize, length - offset)
+ }
+ if offset == 0 {
+ self.putBasicHeader(chunkType, chunkStreamId: chunkStreamId)
+ self.putMessageHeader(chunkType, length: length, message: message)
+ } else {
+ self.putBasicHeader(.three, chunkStreamId: chunkStreamId)
+ }
+ self.data.replaceSubrange(self.position..<self.position + remaining, with: payload[offset..<offset + remaining])