diff --git a/Common/Localizable.xcstrings b/Common/Localizable.xcstrings index 2746ce6f8..0f2c512f9 100644 --- a/Common/Localizable.xcstrings +++ b/Common/Localizable.xcstrings @@ -31330,9 +31330,6 @@ } } } - }, - "Audio only" : { - }, "Auto" : { "localizations" : { @@ -32962,289 +32959,8 @@ } } }, - "Auto-start" : { - "localizations" : { - "de" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Autostart" - } - }, - "el" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Αυτόματη εκκίνηση" - } - }, - "es" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Inicio automático" - } - }, - "fi" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Automaattinen käynnistys" - } - }, - "fr" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Démarrage automatique" - } - }, - "hi" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "ऑटो स्टार्ट" - } - }, - "id" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Mulai otomatis" - } - }, - "it" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Avvio automatico" - } - }, - "ja" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "自動起動" - } - }, - "ko" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "자동 시작" - } - }, - "nl" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Automatische start" - } - }, - "pl" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Automatyczne uruchomienie" - } - }, - "pt-BR" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Início automático" - } - }, - "pt-PT" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Início automático" - } - }, - "ru" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Автозапуск" - } - }, - "sk" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Automatické spustenie" - } - }, - "sv" : { - "stringUnit" : { - "state" : "translated", - "value" : "Autostart" - } - }, - 
"tr" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Otomatik başlatma" - } - }, - "uk" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Автозапуск" - } - }, - "vi" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Tự động khởi động" - } - }, - "zh-Hans" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "自动启动" - } - }, - "zh-Hant" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "自動啟動" - } - }, - "zh-Hant-TW" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "自動啟動" - } - } - } - }, "Auto-stop" : { - "localizations" : { - "de" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Automatischer Stopp" - } - }, - "el" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Αυτόματη διακοπή" - } - }, - "es" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "parada automática" - } - }, - "fi" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Automaattinen pysäytys" - } - }, - "fr" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Arrêt automatique" - } - }, - "hi" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "स्वतः रुकें" - } - }, - "id" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Berhenti otomatis" - } - }, - "it" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Arresto automatico" - } - }, - "ja" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "オートストップ" - } - }, - "ko" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "자동 정지" - } - }, - "nl" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Automatische stop" - } - }, - "pl" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Automatyczne zatrzymanie" - } - }, - "pt-BR" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Parada automática" - } - }, - "pt-PT" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Parada automática" - } - }, - "ru" : { 
- "stringUnit" : { - "state" : "needs_review", - "value" : "Автостоп" - } - }, - "sk" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Automatické zastavenie" - } - }, - "sv" : { - "stringUnit" : { - "state" : "translated", - "value" : "Autostopp" - } - }, - "tr" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Otomatik durdurma" - } - }, - "uk" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Автостоп" - } - }, - "vi" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "Tự động dừng" - } - }, - "zh-Hans" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "自动停止" - } - }, - "zh-Hant" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "自動停止" - } - }, - "zh-Hant-TW" : { - "stringUnit" : { - "state" : "needs_review", - "value" : "自動停止" - } - } - } + }, "automatic" : { "localizations" : { @@ -60465,6 +60181,9 @@ } } } + }, + "Custom WHIP" : { + }, "Cycling power device" : { "localizations" : { @@ -65053,6 +64772,9 @@ } } } + }, + "Disable the WHIP server to change its settings." : { + }, "Disabled connections will not be used." : { "localizations" : { @@ -70517,6 +70239,9 @@ } } } + }, + "Each stream can receive video from one WHIP publisher on the local network." : { + }, "Effects" : { "localizations" : { @@ -74809,6 +74534,12 @@ } } } + }, + "Enter one of the URLs into the WHIP publisher device to send video to this stream. Usually enter the WiFi or Personal Hotspot URL." : { + + }, + "Enter STUN/TURN URLs, one per line." 
: { + }, "Estimated viewer delay" : { "localizations" : { @@ -75401,6 +75132,12 @@ } } } + }, + "Example: http://192.168.1.50:8080/live/whip" : { + + }, + "Example: https://example.com/live/whip" : { + }, "Example: rtmp://arn03.contribute.live-video.net/app/live_123321_sdfopjfwjfpawjefpjawef" : { "localizations" : { @@ -76881,6 +76618,9 @@ } } } + }, + "Example: stun:stun.l.google.com:19302" : { + }, "EXB" : { "localizations" : { @@ -79095,9 +78835,6 @@ } } } - }, - "Failed to create QR-code." : { - }, "Failed to create stream marker" : { "localizations" : { @@ -96094,6 +95831,12 @@ } } } + }, + "https://example.com/live/whip" : { + + }, + "ICE servers" : { + }, "Icons to buy" : { "localizations" : { @@ -109996,6 +109739,9 @@ } } } + }, + "Malformed WHIP URL" : { + }, "Manage streams" : { "localizations" : { @@ -111618,6 +111364,9 @@ } } } + }, + "Max retries" : { + }, "Maximum" : { "localizations" : { @@ -117094,9 +116843,6 @@ } } } - }, - "Mode" : { - }, "Model" : { "localizations" : { @@ -129819,6 +129565,9 @@ } } } + }, + "Note: Custom ICE servers may be ignored depending on WHIP backend." : { + }, "NOTE: Only works on Mac as `hevc_videotoolbox` uses Apple’s encoder." : { "localizations" : { @@ -137207,9 +136956,6 @@ } } } - }, - "Periodic, audio and video" : { - }, "Permissions" : { "localizations" : { @@ -139726,6 +139472,9 @@ } } } + }, + "Please use a valid whip:// URL." : { + }, "PNGTuber" : { "localizations" : { @@ -196244,6 +195993,9 @@ } } } + }, + "Template: https://my_domain/my_endpoint" : { + }, "Template: rtmp://[nearby_ingest_endpoint](https://help.twitch.tv/s/twitch-ingest-recommendation)/app/" : { "localizations" : { @@ -202750,6 +202502,9 @@ } } } + }, + "The TCP port the WHIP server listens for publishers on." : { + }, "The UDP port %u will also be used." : { "localizations" : { @@ -203934,6 +203689,9 @@ } } } + }, + "The WHIP server allows Moblin to receive video streams over the network using WebRTC (WHIP)." 
: { + }, "The zoom (in X) to set when switching to given camera, if enabled." : { "localizations" : { @@ -214442,6 +214200,9 @@ } } } + }, + "Update Settings → Streams → %@ → Video/Audio." : { + }, "Uptime" : { "localizations" : { @@ -216615,7 +216376,7 @@ }, "sv" : { "stringUnit" : { - "state" : "translated", + "state" : "needs_review", "value" : "Använd den webbaserade fjärrkontrollen på en annan enhet för att uppdatera resultattavlan." } }, @@ -216757,7 +216518,7 @@ }, "sv" : { "stringUnit" : { - "state" : "translated", + "state" : "needs_review", "value" : "Använd din Apple Watch för att uppdatera resultattavlan." } }, @@ -221339,7 +221100,7 @@ }, "sv" : { "stringUnit" : { - "state" : "translated", + "state" : "needs_review", "value" : "Volleyboll" } }, @@ -223750,7 +223511,6 @@ } }, "When \"Audio and video only\" is enabled, images, text, GIFs etc. will only be shown when a video (.mp4/.mov) is playing, reducing overall energy consumption." : { - "extractionState" : "stale", "localizations" : { "de" : { "stringUnit" : { @@ -223898,10 +223658,25 @@ } } }, - "When \"Audio and video only\" mode is selected, images, text, GIFs etc. will only be shown when a video (.mp4/.mov) is playing, reducing overall energy consumption." : { + "WHEP client" : { + + }, + "WHEP disconnected" : { + + }, + "WHIP" : { + + }, + "WHIP disconnected" : { + + }, + "WHIP requires H.264 video and Opus audio." : { + + }, + "WHIP server" : { }, - "When \"Audio only\" mode is selected, no video will be rendered at all. Only audio will play." : { + "WHIP uses HTTP/HTTPS endpoints. 
(whip:// is also accepted for compatibility.)" : { }, "Whirlpool" : { diff --git a/Common/Various/CommonUtils.swift b/Common/Various/CommonUtils.swift index edcb14b6d..205404165 100644 --- a/Common/Various/CommonUtils.swift +++ b/Common/Various/CommonUtils.swift @@ -498,6 +498,7 @@ private let cameraPositionRtmp = "(RTMP)" private let cameraPositionSrtla = "(SRT(LA))" private let cameraPositionRist = "(RIST)" private let cameraPositionRtsp = "(RTSP)" +private let cameraPositionWhep = "(WHEP)" private let cameraPositionMediaPlayer = "(Media player)" func rtmpCamera(name: String) -> String { @@ -532,6 +533,14 @@ func isRtspCameraOrMic(camera: String) -> Bool { return camera.hasSuffix(cameraPositionRtsp) } +func whepCamera(name: String) -> String { + return "\(name) \(cameraPositionWhep)" +} + +func isWhepCameraOrMic(camera: String) -> Bool { + return camera.hasSuffix(cameraPositionWhep) +} + func mediaPlayerCamera(name: String) -> String { return "\(name) \(cameraPositionMediaPlayer)" } diff --git a/Common/Various/Validate.swift b/Common/Various/Validate.swift index 2da27d9ac..8d74e547a 100644 --- a/Common/Various/Validate.swift +++ b/Common/Various/Validate.swift @@ -95,6 +95,14 @@ func isValidUrl(url value: String, if let message = isValidRtmpUrl(url: value, rtmpStreamKeyRequired: rtmpStreamKeyRequired) { return message } + case "http": + break + case "https": + break + case "whip": + break + case "whips": + break case "srt": if let message = isValidSrtUrl(url: value) { return message diff --git a/Moblin Screen Recording/Moblin Screen Recording.entitlements b/Moblin Screen Recording/Moblin Screen Recording.entitlements index 4e32970bb..fe1f1756d 100644 --- a/Moblin Screen Recording/Moblin Screen Recording.entitlements +++ b/Moblin Screen Recording/Moblin Screen Recording.entitlements @@ -4,7 +4,7 @@ com.apple.security.application-groups - group.com.eerimoq.Moblin + group.io.meetmo.mocam diff --git a/Moblin.xcodeproj/project.pbxproj 
b/Moblin.xcodeproj/project.pbxproj index 7bd4745c2..15ab5c56f 100644 --- a/Moblin.xcodeproj/project.pbxproj +++ b/Moblin.xcodeproj/project.pbxproj @@ -34,6 +34,8 @@ 03ECDF532B8E4E6000BD920E /* Moblin.app in Embed Watch Content */ = {isa = PBXBuildFile; fileRef = 03ECDF462B8E4E5E00BD920E /* Moblin.app */; settings = {ATTRIBUTES = (RemoveHeadersOnCopy, ); }; }; 03ECDF5D2B8E5F0B00BD920E /* WrappingHStack in Frameworks */ = {isa = PBXBuildFile; productRef = 03ECDF5C2B8E5F0B00BD920E /* WrappingHStack */; }; 03F465EC2C441D1400630708 /* CrcSwift in Frameworks */ = {isa = PBXBuildFile; productRef = 03F465EB2C441D1400630708 /* CrcSwift */; }; + 08A1B2C3D4E5F67890123459 /* HaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = 08A1B2C3D4E5F67890123457 /* HaishinKit */; }; + 08A1B2C3D4E5F6789012345A /* RTCHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = 08A1B2C3D4E5F67890123458 /* RTCHaishinKit */; }; 882D0C162DF76F5B0035BFAF /* BlackSharkLib in Frameworks */ = {isa = PBXBuildFile; productRef = 882D0C152DF76F5B0035BFAF /* BlackSharkLib */; }; /* End PBXBuildFile section */ @@ -230,6 +232,8 @@ 03A08B7C2AC295620018BA95 /* AlertToast in Frameworks */, 0377239C2DE35191007D040D /* VRMSceneKit in Frameworks */, 03BC116B2AE56C2200C38FC4 /* SDWebImageWebPCoder in Frameworks */, + 08A1B2C3D4E5F67890123459 /* HaishinKit in Frameworks */, + 08A1B2C3D4E5F6789012345A /* RTCHaishinKit in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -364,6 +368,8 @@ 035351932F1C271700428DAC /* AppAuthCore */, 035351952F1C27A500428DAC /* AppAuth */, 0360FD152F228EEB00FF8847 /* MetalPetal */, + 08A1B2C3D4E5F67890123457 /* HaishinKit */, + 08A1B2C3D4E5F67890123458 /* RTCHaishinKit */, ); productName = Mobs; productReference = 035E9E332A9A02D6009D4F5A /* Moblin.app */; @@ -519,6 +525,7 @@ 882D0C142DF76F5B0035BFAF /* XCRemoteSwiftPackageReference "BlackSharkLib" */, 035351922F1C271700428DAC /* XCRemoteSwiftPackageReference "AppAuth-iOS" */, 0360FD142F228EEB00FF8847 /* 
XCRemoteSwiftPackageReference "MetalPetal" */, + 08A1B2C3D4E5F67890123456 /* XCLocalSwiftPackageReference "Vendor/HaishinKit.swift" */, ); productRefGroup = 035E9E342A9A02D6009D4F5A /* Products */; projectDirPath = ""; @@ -643,6 +650,7 @@ CODE_SIGN_ENTITLEMENTS = "Moblin Screen Recording/Moblin Screen Recording.entitlements"; CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_TEAM = 442HHYCXW8; ENABLE_USER_SCRIPT_SANDBOXING = YES; GCC_C_LANGUAGE_STANDARD = gnu17; GENERATE_INFOPLIST_FILE = YES; @@ -661,7 +669,7 @@ ); LOCALIZATION_PREFERS_STRING_CATALOGS = YES; MARKETING_VERSION = 30.2027.0; - PRODUCT_BUNDLE_IDENTIFIER = "com.eerimoq.Mobs.Moblin-Capture"; + PRODUCT_BUNDLE_IDENTIFIER = io.webmo.mocam.Capture; PRODUCT_NAME = "$(TARGET_NAME)"; SKIP_INSTALL = YES; SUPPORTED_PLATFORMS = "iphoneos iphonesimulator"; @@ -680,6 +688,7 @@ CODE_SIGN_ENTITLEMENTS = "Moblin Screen Recording/Moblin Screen Recording.entitlements"; CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_TEAM = 442HHYCXW8; ENABLE_USER_SCRIPT_SANDBOXING = YES; GCC_C_LANGUAGE_STANDARD = gnu17; GENERATE_INFOPLIST_FILE = YES; @@ -698,7 +707,7 @@ ); LOCALIZATION_PREFERS_STRING_CATALOGS = YES; MARKETING_VERSION = 30.2027.0; - PRODUCT_BUNDLE_IDENTIFIER = "com.eerimoq.Mobs.Moblin-Capture"; + PRODUCT_BUNDLE_IDENTIFIER = io.webmo.mocam.Capture; PRODUCT_NAME = "$(TARGET_NAME)"; SKIP_INSTALL = YES; SUPPORTED_PLATFORMS = "iphoneos iphonesimulator"; @@ -857,10 +866,12 @@ CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; DEVELOPMENT_ASSET_PATHS = ""; + DEVELOPMENT_TEAM = 442HHYCXW8; ENABLE_PREVIEWS = YES; GENERATE_INFOPLIST_FILE = YES; INFOPLIST_FILE = Moblin/Info.plist; INFOPLIST_KEY_CFBundleDisplayName = Moblin; + INFOPLIST_KEY_ITSAppUsesNonExemptEncryption = NO; INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.video"; INFOPLIST_KEY_NSBluetoothAlwaysUsageDescription = "Moblin communicates with DJI cameras using Bluetooth."; 
INFOPLIST_KEY_NSCameraUsageDescription = "Moblin live streams video from the camera."; @@ -889,7 +900,7 @@ MTLLINKER_FLAGS = "-fcikernel"; MTL_COMPILER_FLAGS = "-fcikernel"; MTL_HEADER_SEARCH_PATHS = "$(HEADER_SEARCH_PATHS)"; - PRODUCT_BUNDLE_IDENTIFIER = com.eerimoq.Mobs; + PRODUCT_BUNDLE_IDENTIFIER = io.webmo.mocam; PRODUCT_NAME = "$(TARGET_NAME)"; SUPPORTED_PLATFORMS = "iphoneos iphonesimulator"; SUPPORTS_MACCATALYST = NO; @@ -912,10 +923,12 @@ CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; DEVELOPMENT_ASSET_PATHS = ""; + DEVELOPMENT_TEAM = 442HHYCXW8; ENABLE_PREVIEWS = YES; GENERATE_INFOPLIST_FILE = YES; INFOPLIST_FILE = Moblin/Info.plist; INFOPLIST_KEY_CFBundleDisplayName = Moblin; + INFOPLIST_KEY_ITSAppUsesNonExemptEncryption = NO; INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.video"; INFOPLIST_KEY_NSBluetoothAlwaysUsageDescription = "Moblin communicates with DJI cameras using Bluetooth."; INFOPLIST_KEY_NSCameraUsageDescription = "Moblin live streams video from the camera."; @@ -944,7 +957,7 @@ MTLLINKER_FLAGS = "-fcikernel"; MTL_COMPILER_FLAGS = "-fcikernel"; MTL_HEADER_SEARCH_PATHS = "$(HEADER_SEARCH_PATHS)"; - PRODUCT_BUNDLE_IDENTIFIER = com.eerimoq.Mobs; + PRODUCT_BUNDLE_IDENTIFIER = io.webmo.mocam; PRODUCT_NAME = "$(TARGET_NAME)"; SUPPORTED_PLATFORMS = "iphoneos iphonesimulator"; SUPPORTS_MACCATALYST = NO; @@ -963,6 +976,7 @@ ASSETCATALOG_COMPILER_WIDGET_BACKGROUND_COLOR_NAME = WidgetBackground; CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_TEAM = 442HHYCXW8; ENABLE_USER_SCRIPT_SANDBOXING = YES; GCC_C_LANGUAGE_STANDARD = gnu17; GENERATE_INFOPLIST_FILE = YES; @@ -981,7 +995,7 @@ ); LOCALIZATION_PREFERS_STRING_CATALOGS = YES; MARKETING_VERSION = 0.1.0; - PRODUCT_BUNDLE_IDENTIFIER = com.eerimoq.Mobs.Watch.Widget; + PRODUCT_BUNDLE_IDENTIFIER = io.webmo.mocam.Watch.Widget; PRODUCT_NAME = "$(TARGET_NAME)"; SDKROOT = watchos; SKIP_INSTALL = YES; @@ -1000,6 +1014,7 @@ 
ASSETCATALOG_COMPILER_WIDGET_BACKGROUND_COLOR_NAME = WidgetBackground; CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_TEAM = 442HHYCXW8; ENABLE_USER_SCRIPT_SANDBOXING = YES; GCC_C_LANGUAGE_STANDARD = gnu17; GENERATE_INFOPLIST_FILE = YES; @@ -1018,7 +1033,7 @@ ); LOCALIZATION_PREFERS_STRING_CATALOGS = YES; MARKETING_VERSION = 0.1.0; - PRODUCT_BUNDLE_IDENTIFIER = com.eerimoq.Mobs.Watch.Widget; + PRODUCT_BUNDLE_IDENTIFIER = io.webmo.mocam.Watch.Widget; PRODUCT_NAME = "$(TARGET_NAME)"; SDKROOT = watchos; SKIP_INSTALL = YES; @@ -1081,6 +1096,7 @@ CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; DEVELOPMENT_ASSET_PATHS = "\"Moblin Watch/Preview Content\""; + DEVELOPMENT_TEAM = 442HHYCXW8; ENABLE_PREVIEWS = YES; ENABLE_USER_SCRIPT_SANDBOXING = YES; GCC_C_LANGUAGE_STANDARD = gnu17; @@ -1094,7 +1110,7 @@ INFOPLIST_KEY_NSPhotoLibraryAddUsageDescription = Yes; INFOPLIST_KEY_NSPhotoLibraryUsageDescription = Yes; INFOPLIST_KEY_UISupportedInterfaceOrientations = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown"; - INFOPLIST_KEY_WKCompanionAppBundleIdentifier = com.eerimoq.Mobs; + INFOPLIST_KEY_WKCompanionAppBundleIdentifier = io.webmo.mocam; INFOPLIST_KEY_WKRunsIndependentlyOfCompanionApp = NO; LD_RUNPATH_SEARCH_PATHS = ( "$(inherited)", @@ -1102,7 +1118,7 @@ ); LOCALIZATION_PREFERS_STRING_CATALOGS = YES; MARKETING_VERSION = 0.1.0; - PRODUCT_BUNDLE_IDENTIFIER = com.eerimoq.Mobs.Watch; + PRODUCT_BUNDLE_IDENTIFIER = io.webmo.mocam.Watch; PRODUCT_NAME = Moblin; SDKROOT = watchos; SKIP_INSTALL = YES; @@ -1123,6 +1139,7 @@ CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; DEVELOPMENT_ASSET_PATHS = "\"Moblin Watch/Preview Content\""; + DEVELOPMENT_TEAM = 442HHYCXW8; ENABLE_PREVIEWS = YES; ENABLE_USER_SCRIPT_SANDBOXING = YES; GCC_C_LANGUAGE_STANDARD = gnu17; @@ -1136,7 +1153,7 @@ INFOPLIST_KEY_NSPhotoLibraryAddUsageDescription = Yes; INFOPLIST_KEY_NSPhotoLibraryUsageDescription = Yes; 
INFOPLIST_KEY_UISupportedInterfaceOrientations = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown"; - INFOPLIST_KEY_WKCompanionAppBundleIdentifier = com.eerimoq.Mobs; + INFOPLIST_KEY_WKCompanionAppBundleIdentifier = io.webmo.mocam; INFOPLIST_KEY_WKRunsIndependentlyOfCompanionApp = NO; LD_RUNPATH_SEARCH_PATHS = ( "$(inherited)", @@ -1144,7 +1161,7 @@ ); LOCALIZATION_PREFERS_STRING_CATALOGS = YES; MARKETING_VERSION = 0.1.0; - PRODUCT_BUNDLE_IDENTIFIER = com.eerimoq.Mobs.Watch; + PRODUCT_BUNDLE_IDENTIFIER = io.webmo.mocam.Watch; PRODUCT_NAME = Moblin; SDKROOT = watchos; SKIP_INSTALL = YES; @@ -1214,6 +1231,13 @@ }; /* End XCConfigurationList section */ +/* Begin XCLocalSwiftPackageReference section */ + 08A1B2C3D4E5F67890123456 /* XCLocalSwiftPackageReference "Vendor/HaishinKit.swift" */ = { + isa = XCLocalSwiftPackageReference; + relativePath = Vendor/HaishinKit.swift; + }; +/* End XCLocalSwiftPackageReference section */ + /* Begin XCRemoteSwiftPackageReference section */ 0318D3682CF51D6900E12F3B /* XCRemoteSwiftPackageReference "swift-protobuf" */ = { isa = XCRemoteSwiftPackageReference; @@ -1459,6 +1483,16 @@ package = 03F465EA2C441D1400630708 /* XCRemoteSwiftPackageReference "CrcSwift" */; productName = CrcSwift; }; + 08A1B2C3D4E5F67890123457 /* HaishinKit */ = { + isa = XCSwiftPackageProductDependency; + package = 08A1B2C3D4E5F67890123456 /* XCLocalSwiftPackageReference "Vendor/HaishinKit.swift" */; + productName = HaishinKit; + }; + 08A1B2C3D4E5F67890123458 /* RTCHaishinKit */ = { + isa = XCSwiftPackageProductDependency; + package = 08A1B2C3D4E5F67890123456 /* XCLocalSwiftPackageReference "Vendor/HaishinKit.swift" */; + productName = RTCHaishinKit; + }; 882D0C152DF76F5B0035BFAF /* BlackSharkLib */ = { isa = XCSwiftPackageProductDependency; package = 882D0C142DF76F5B0035BFAF /* XCRemoteSwiftPackageReference "BlackSharkLib" */; diff --git a/Moblin.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved 
b/Moblin.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved index e65b22013..65e1040e2 100644 --- a/Moblin.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved +++ b/Moblin.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved @@ -1,5 +1,5 @@ { - "originHash" : "2ec89183ce973e73157e1f6178f92aabac79182102b81c7f9097c9d2d72f6022", + "originHash" : "cd2189c0bc4ce9b8808eccb14601cdb6dff0bacbf693a1ee023eef73c22b2903", "pins" : [ { "identity" : "alerttoast", @@ -55,6 +55,15 @@ "version" : "2.2.0" } }, + { + "identity" : "logboard", + "kind" : "remoteSourceControl", + "location" : "https://github.com/shogo4405/Logboard.git", + "state" : { + "revision" : "8f41c63afb903040b77049ee2efa8c257b8c0d50", + "version" : "2.6.0" + } + }, { "identity" : "metalpetal", "kind" : "remoteSourceControl", diff --git a/Moblin/Info.plist b/Moblin/Info.plist index 80dbfc1a7..c32f3f5fe 100644 --- a/Moblin/Info.plist +++ b/Moblin/Info.plist @@ -17,8 +17,6 @@ - ITSAppUsesNonExemptEncryption - NSAppTransportSecurity NSAllowsArbitraryLoads @@ -36,6 +34,23 @@ IntentIntent MuteIntent + UIApplicationSceneManifest + + UIApplicationSupportsMultipleScenes + + UISceneConfigurations + + UIWindowSceneSessionRoleExternalDisplayNonInteractive + + + UISceneConfigurationName + External Display + UISceneDelegateClassName + $(PRODUCT_MODULE_NAME).SceneDelegate + + + + UIBackgroundModes audio @@ -44,34 +59,15 @@ UIFileSharingEnabled - UIApplicationSceneManifest - - UIApplicationSupportsMultipleScenes - - UISceneConfigurations - - UIWindowSceneSessionRoleExternalDisplayNonInteractive - - - UISceneDelegateClassName - $(PRODUCT_MODULE_NAME).SceneDelegate - UISceneConfigurationName - External Display - - - - - CFBundleIdentifier - - WiFiAwareServices - - _moblin._udp - - Publishable - - Subscribable - - - + WiFiAwareServices + + _moblin._udp + + Publishable + + Subscribable + + + diff --git a/Moblin/Media/HaishinKit/Codec/Video/VideoDecoder.swift 
b/Moblin/Media/HaishinKit/Codec/Video/VideoDecoder.swift index 56abd8bb6..f9ae37466 100644 --- a/Moblin/Media/HaishinKit/Codec/Video/VideoDecoder.swift +++ b/Moblin/Media/HaishinKit/Codec/Video/VideoDecoder.swift @@ -11,6 +11,7 @@ class VideoDecoder { private var formatDescription: CMFormatDescription? weak var delegate: (any VideoDecoderDelegate)? private var invalidateSession = true + private var consecutiveBadFrames = 0 private var session: VTDecompressionSession? { didSet { oldValue?.invalidate() @@ -30,6 +31,15 @@ class VideoDecoder { } } + /// Synchronously sets the format description and marks the session for recreation. + /// Use when `decodeSampleBuffer` will be called on the same queue immediately after, + /// avoiding the race condition with the async `startRunning(formatDescription:)`. + func setFormatDescriptionSync(_ formatDescription: CMFormatDescription) { + self.formatDescription = formatDescription + self.invalidateSession = true + self.isRunning = true + } + func stopRunning() { lockQueue.async { self.session = nil @@ -55,6 +65,15 @@ class VideoDecoder { } guard let imageBuffer, status == noErr else { logger.info("video-decoder: Failed to decode frame status \(status)") + // Recover from persistent bad-data errors (e.g., after app state change + // disrupts the hardware decoder). Recreate session on next IDR. 
+ self.lockQueue.async { + self.consecutiveBadFrames += 1 + if self.consecutiveBadFrames >= 3 { + self.invalidateSession = true + self.consecutiveBadFrames = 0 + } + } return } guard let formatDescription = CMVideoFormatDescription.create(imageBuffer: imageBuffer) else { @@ -69,6 +88,7 @@ class VideoDecoder { return } self.lockQueue.async { + self.consecutiveBadFrames = 0 self.delegate?.videoDecoderOutputSampleBuffer(self, sampleBuffer) } } diff --git a/Moblin/Media/HaishinKit/WhipStream.swift b/Moblin/Media/HaishinKit/WhipStream.swift new file mode 100644 index 000000000..39c9e9c57 --- /dev/null +++ b/Moblin/Media/HaishinKit/WhipStream.swift @@ -0,0 +1,172 @@ +import AVFoundation +import Foundation + +import HaishinKit +import RTCHaishinKit + +protocol WhipStreamDelegate: AnyObject { + func whipStreamOnConnected() + func whipStreamOnDisconnected(reason: String) +} + +final class WhipStream: NSObject { + private let processor: Processor + private weak var delegate: (any WhipStreamDelegate)? + + private var session: (any Session)? + private var rtcStream: RTCStream? + private var readyStateTask: Task? 
+ private var didReportConnected = false + + init(processor: Processor, delegate: WhipStreamDelegate) { + self.processor = processor + self.delegate = delegate + } + + func start( + endpointUrl: URL, + settings: SettingsStreamWhip, + videoDimensions: CMVideoDimensions + ) { + Task { [weak self] in + guard let self else { return } + await self.startInternal( + endpointUrl: endpointUrl, + settings: settings, + videoDimensions: videoDimensions + ) + } + } + + func stop() { + Task { [weak self] in + guard let self else { return } + await self.stopInternal() + } + } + + private func startInternal( + endpointUrl: URL, + settings: SettingsStreamWhip, + videoDimensions: CMVideoDimensions + ) async { + await stopInternal() + didReportConnected = false + do { + guard let session = try await SessionBuilderFactory.shared + .make(endpointUrl) + .setMode(.publish) + .setConfiguration(nil) + .build() + else { + throw NSError(domain: "Moblin", code: 2, userInfo: [ + NSLocalizedDescriptionKey: "WHIP session could not be created", + ]) + } + self.session = session + await session.setMaxRetryCount(settings.maxRetryCount) + + let rtcStream = (await session.stream) as? 
RTCStream + guard let rtcStream else { + throw NSError(domain: "Moblin", code: 1, userInfo: [ + NSLocalizedDescriptionKey: "WHIP session stream is not RTCStream", + ]) + } + self.rtcStream = rtcStream + + await rtcStream.setDirection(.sendonly) + try await rtcStream.setAudioSettings(.init(channelMap: [0, 0], format: .opus)) + try await rtcStream.setVideoSettings(.init(videoSize: .init( + width: Double(videoDimensions.width), + height: Double(videoDimensions.height) + ))) + + readyStateTask = Task { [weak self] in + guard let self else { return } + for await state in await session.readyState { + switch state { + case .open: + processorControlQueue.async { + self.processor.startEncoding(self) + guard !self.didReportConnected else { return } + self.didReportConnected = true + DispatchQueue.main.async { + self.delegate?.whipStreamOnConnected() + } + } + case .closing, .closed: + processorControlQueue.async { + self.processor.stopEncoding(self) + } + default: + break + } + } + } + + try await session.connect { [weak self] in + guard let self else { return } + processorControlQueue.async { + self.processor.stopEncoding(self) + } + DispatchQueue.main.async { + self.delegate?.whipStreamOnDisconnected(reason: String(localized: "WHIP disconnected")) + } + } + } catch { + processorControlQueue.async { [weak self] in + guard let self else { return } + self.processor.stopEncoding(self) + } + DispatchQueue.main.async { [weak self] in + self?.delegate?.whipStreamOnDisconnected(reason: "WHIP connect failed: \(error)") + } + await stopInternal() + } + } + + private func stopInternal() async { + readyStateTask?.cancel() + readyStateTask = nil + didReportConnected = false + + processorControlQueue.async { [weak self] in + guard let self else { return } + self.processor.stopEncoding(self) + } + + do { + try await session?.close() + } catch { + // Best effort close. 
+ } + self.session = nil + self.rtcStream = nil + } +} + +extension WhipStream: AudioEncoderDelegate { + func audioEncoderOutputFormat(_: AVAudioFormat) {} + + func audioEncoderOutputBuffer(_ buffer: AVAudioCompressedBuffer, _ presentationTimeStamp: CMTime) { + guard let rtcStream else { return } + let sampleRate = processor.getAudioEncoder().getSampleRate() ?? 48_000 + let sampleTime = AVAudioFramePosition(presentationTimeStamp.seconds * sampleRate) + let when = AVAudioTime(sampleTime: sampleTime, atRate: sampleRate) + Task { await rtcStream.append(buffer, when: when) } + } +} + +extension WhipStream: VideoEncoderDelegate { + func videoEncoderOutputFormat(_: VideoEncoder, _: CMFormatDescription) {} + + func videoEncoderOutputSampleBuffer( + _: VideoEncoder, + _ sampleBuffer: CMSampleBuffer, + _: CMTime + ) { + guard let rtcStream else { return } + Task { await rtcStream.append(sampleBuffer) } + } +} + diff --git a/Moblin/Media/WhepClient/WhepClient.swift b/Moblin/Media/WhepClient/WhepClient.swift new file mode 100644 index 000000000..3d5fc0f1c --- /dev/null +++ b/Moblin/Media/WhepClient/WhepClient.swift @@ -0,0 +1,198 @@ +import AVFoundation +import CoreMedia +import Foundation + +import HaishinKit +import RTCHaishinKit + +protocol WhepClientDelegate: AnyObject { + func whepClientErrorToast(title: String) + func whepClientConnected(cameraId: UUID) + func whepClientDisconnected(cameraId: UUID, reason: String) + func whepClientOnVideoBuffer(cameraId: UUID, _ sampleBuffer: CMSampleBuffer) + func whepClientOnAudioBuffer(cameraId: UUID, _ sampleBuffer: CMSampleBuffer) +} + +private final class WhepClientStreamOutput: StreamOutput, @unchecked Sendable { + private let cameraId: UUID + private weak var delegate: (any WhepClientDelegate)? 
+ private let latency: Double // seconds + private let lock = NSLock() + // Video PTS retiming (RTSP-style): basePts + (framePts - firstFramePts) + latency + private var basePts: Double = -1 + private var firstFramePts: Double = -1 + private var lastOutputPts: Double = -1 + // Audio PTS retiming + private var audioBasePts: Double = -1 + private var firstAudioPts: Double = -1 + + init(cameraId: UUID, delegate: (any WhepClientDelegate)?, latency: Double) { + self.cameraId = cameraId + self.delegate = delegate + self.latency = latency + } + + func stream(_ stream: some StreamConvertible, didOutput audio: AVAudioBuffer, when: AVAudioTime) { + guard let audio = audio as? AVAudioPCMBuffer else { + return + } + let audioSeconds = AVAudioTime.seconds(forHostTime: when.hostTime) + lock.lock() + if audioBasePts < 0 { + audioBasePts = currentPresentationTimeStamp().seconds + firstAudioPts = audioSeconds + } + let newPtsSeconds = audioBasePts + (audioSeconds - firstAudioPts) + latency + lock.unlock() + let pts = CMTime(seconds: newPtsSeconds, preferredTimescale: 1_000_000_000) + guard let sampleBuffer = audio.makeSampleBuffer(pts) else { + return + } + delegate?.whepClientOnAudioBuffer(cameraId: cameraId, sampleBuffer) + } + + func stream(_ stream: some StreamConvertible, didOutput video: CMSampleBuffer) { + let framePts = video.presentationTimeStamp.seconds + lock.lock() + if basePts < 0 { + basePts = currentPresentationTimeStamp().seconds + firstFramePts = framePts + } + var newPtsSeconds = basePts + (framePts - firstFramePts) + latency + // Ensure monotonic (never go backwards). 
+ if newPtsSeconds <= lastOutputPts { + newPtsSeconds = lastOutputPts + 0.001 + } + lastOutputPts = newPtsSeconds + lock.unlock() + let newPts = CMTime(seconds: newPtsSeconds, preferredTimescale: 1_000_000_000) + if let retimed = video.replacePresentationTimeStamp(newPts) { + delegate?.whepClientOnVideoBuffer(cameraId: cameraId, retimed) + } else { + delegate?.whepClientOnVideoBuffer(cameraId: cameraId, video) + } + } +} + +final class WhepClient: NSObject { + private let cameraId: UUID + private let url: URL + private let latency: Double + + weak var delegate: (any WhepClientDelegate)? + + private var session: (any Session)? + private var rtcStream: RTCStream? + private var readyStateTask: Task? + private var didReportConnected = false + + init(cameraId: UUID, url: URL, latency: Double) { + self.cameraId = cameraId + self.url = url + self.latency = latency + super.init() + } + + func start() { + Task { [weak self] in + guard let self else { return } + await self.startInternal() + } + } + + func stop() { + Task { [weak self] in + guard let self else { return } + await self.stopInternal() + } + } + + private func startInternal() async { + await stopInternal() + didReportConnected = false + + do { + guard let session = try await SessionBuilderFactory.shared + .make(url) + .setMode(.playback) + .setConfiguration(nil) + .build() + else { + throw NSError(domain: "Moblin", code: 3, userInfo: [ + NSLocalizedDescriptionKey: "WHEP session could not be created", + ]) + } + self.session = session + + let rtcStream = (await session.stream) as? 
RTCStream + guard let rtcStream else { + throw NSError(domain: "Moblin", code: 4, userInfo: [ + NSLocalizedDescriptionKey: "WHEP session stream is not RTCStream", + ]) + } + self.rtcStream = rtcStream + await rtcStream.setDirection(.recvonly) + await rtcStream.addOutput(WhepClientStreamOutput(cameraId: cameraId, delegate: delegate, latency: latency)) + + readyStateTask = Task { [weak self] in + guard let self else { return } + for await state in await session.readyState { + switch state { + case .open: + guard !self.didReportConnected else { break } + self.didReportConnected = true + DispatchQueue.main.async { + self.delegate?.whepClientConnected(cameraId: self.cameraId) + } + case .closing, .closed: + DispatchQueue.main.async { + self.delegate?.whepClientDisconnected( + cameraId: self.cameraId, + reason: String(localized: "WHEP disconnected") + ) + } + default: + break + } + } + } + + try await session.connect { [weak self] in + guard let self else { return } + DispatchQueue.main.async { + self.delegate?.whepClientDisconnected( + cameraId: self.cameraId, + reason: String(localized: "WHEP disconnected") + ) + } + } + } catch { + DispatchQueue.main.async { [weak self] in + guard let self else { return } + self.delegate?.whepClientErrorToast(title: "WHEP connect failed: \(error)") + self.delegate?.whepClientDisconnected(cameraId: self.cameraId, reason: "\(error)") + } + await stopInternal() + } + } + + private func stopInternal() async { + readyStateTask?.cancel() + readyStateTask = nil + didReportConnected = false + + do { + try await session?.close() + } catch { + // Best effort close. 
+ } + + if let rtcStream { + await rtcStream.removeAllOutputs() + } + self.rtcStream = nil + self.session = nil + _ = latency // keep for potential future reconnect jitter logic + } +} + diff --git a/Moblin/Media/WhipServer/WhipServer.swift b/Moblin/Media/WhipServer/WhipServer.swift new file mode 100644 index 000000000..2a33988af --- /dev/null +++ b/Moblin/Media/WhipServer/WhipServer.swift @@ -0,0 +1,593 @@ +import AVFoundation +import CoreMedia +import Foundation +import HaishinKit +import Network +import RTCHaishinKit + +let whipServerDispatchQueue = DispatchQueue(label: "com.eerimoq.whip-server") + +protocol WhipServerDelegate: AnyObject { + func whipServerOnPublishStart(streamKey: String) + func whipServerOnPublishStop(streamKey: String, reason: String) + func whipServerOnVideoBuffer(cameraId: UUID, _ sampleBuffer: CMSampleBuffer) + func whipServerOnAudioBuffer(cameraId: UUID, _ sampleBuffer: CMSampleBuffer) +} + +/// Handles audio from IncomingStream (Opus → PCM via AudioCodec) and retimes PTS. +/// Video is handled separately by WhipServerVideoDecoder via onCompressedVideo. +private final class WhipServerAudioOutput: StreamOutput, @unchecked Sendable { + private let cameraId: UUID + private weak var delegate: (any WhipServerDelegate)? + private let latency: Double + private let lock = NSLock() + private var audioBasePts: Double = -1 + private var firstAudioPts: Double = -1 + + init(cameraId: UUID, delegate: (any WhipServerDelegate)?, latency: Double) { + self.cameraId = cameraId + self.delegate = delegate + self.latency = latency + } + + func stream(_ stream: some StreamConvertible, didOutput audio: AVAudioBuffer, when: AVAudioTime) { + guard let audio = audio as? 
AVAudioPCMBuffer else { + return + } + let audioSeconds = AVAudioTime.seconds(forHostTime: when.hostTime) + lock.lock() + if audioBasePts < 0 { + audioBasePts = currentPresentationTimeStamp().seconds + firstAudioPts = audioSeconds + } + let newPtsSeconds = audioBasePts + (audioSeconds - firstAudioPts) + latency + lock.unlock() + let pts = CMTime(seconds: newPtsSeconds, preferredTimescale: 1_000_000_000) + guard let sampleBuffer = audio.makeSampleBuffer(pts) else { + return + } + delegate?.whipServerOnAudioBuffer(cameraId: cameraId, sampleBuffer) + } + + func stream(_: some StreamConvertible, didOutput _: CMSampleBuffer) { + // Video is handled by WhipServerVideoDecoder, not through RTCStream outputs. + } +} + +/// Decodes compressed H264 video from RTCTrack and delivers decoded frames to +/// BufferedVideo. Matches the RTMP server approach: retime PTS BEFORE decode, +/// use Moblin's VideoDecoder, direct delivery — no MediaLink or IncomingStream. +private final class WhipServerVideoDecoder: @unchecked Sendable { + private let cameraId: UUID + private weak var delegate: (any WhipServerDelegate)? + private let latency: Double + private let lockQueue = DispatchQueue(label: "com.eerimoq.whip-video-decoder") + private let decoder: VideoDecoder + private var basePts: Double = -1 + private var firstFramePts: Double = -1 + private var lastOutputPts: Double = -1 + private var currentFormatDescription: CMFormatDescription? + + init(cameraId: UUID, delegate: (any WhipServerDelegate)?, latency: Double) { + self.cameraId = cameraId + self.delegate = delegate + self.latency = latency + decoder = VideoDecoder(lockQueue: lockQueue) + } + + func start() { + decoder.delegate = self + } + + func stop() { + decoder.stopRunning() + } + + /// Called from the RTCTrack callback thread with compressed H264 CMSampleBuffer. 
+ func handleCompressedVideo(_ buffer: CMSampleBuffer) { + lockQueue.async { [weak self] in + self?.handleCompressedVideoInternal(buffer) + } + } + + private func handleCompressedVideoInternal(_ buffer: CMSampleBuffer) { + // Update decoder session when format description changes (new SPS/PPS). + // Use setFormatDescriptionSync so the format description is available + // IMMEDIATELY for the decodeSampleBuffer call below (same queue). + if let fd = buffer.formatDescription, fd != currentFormatDescription { + currentFormatDescription = fd + decoder.setFormatDescriptionSync(fd) + } + + // Retime PTS before decode (matches RTMP server approach). + let framePts = buffer.presentationTimeStamp.seconds + if basePts < 0 { + basePts = currentPresentationTimeStamp().seconds + firstFramePts = framePts + } + var newPtsSeconds = basePts + (framePts - firstFramePts) + latency + if newPtsSeconds <= lastOutputPts { + newPtsSeconds = lastOutputPts + 0.001 + } + lastOutputPts = newPtsSeconds + + // Discard stale frames whose PTS is already in the past. + // This prevents burst playback of accumulated frames after decode errors. + let now = currentPresentationTimeStamp().seconds + if newPtsSeconds < now - 0.1 { + // Frame is more than 100ms in the past — skip it. + // Reset base PTS so the next frame starts fresh relative to "now". 
+ basePts = -1 + firstFramePts = -1 + lastOutputPts = -1 + return + } + + let newPts = CMTime(seconds: newPtsSeconds, preferredTimescale: 1_000_000_000) + if let retimed = buffer.replacePresentationTimeStamp(newPts) { + decoder.decodeSampleBuffer(retimed) + } + } +} + +extension WhipServerVideoDecoder: VideoDecoderDelegate { + func videoDecoderOutputSampleBuffer(_: VideoDecoder, _ sampleBuffer: CMSampleBuffer) { + delegate?.whipServerOnVideoBuffer(cameraId: cameraId, sampleBuffer) + } +} + +private final class WhipServerSession: NSObject, RTCPeerConnectionDelegate { + let streamKey: String + let cameraId: UUID + let peerConnection: RTCPeerConnection + let stream: RTCStream + let videoDecoder: WhipServerVideoDecoder + weak var delegate: (any WhipServerDelegate)? + private let onTerminated: @Sendable () -> Void + private var terminated = false + private var didConnect = false + private var pendingTerminateWorkItem: DispatchWorkItem? + private let localCandidatesLock = NSLock() + private var localCandidates: [RTCIceCandidate] = [] + + init( + streamKey: String, + cameraId: UUID, + peerConnection: RTCPeerConnection, + stream: RTCStream, + videoDecoder: WhipServerVideoDecoder, + delegate: (any WhipServerDelegate)?, + onTerminated: @escaping @Sendable () -> Void + ) { + self.streamKey = streamKey + self.cameraId = cameraId + self.peerConnection = peerConnection + self.stream = stream + self.videoDecoder = videoDecoder + self.delegate = delegate + self.onTerminated = onTerminated + super.init() + peerConnection.delegate = self + peerConnection.attachIncomingStream(stream) + } + + func close(reason: String) { + terminate(reason: reason) + videoDecoder.stop() + peerConnection.close() + Task { await stream.close() } + } + + func peerConnection(_ peerConnection: RTCPeerConnection, connectionStateChanged connectionState: RTCPeerConnection.ConnectionState) { + logger.info("whip-server: \(streamKey) state=\(connectionState)") + switch connectionState { + case .connected: + 
didConnect = true + pendingTerminateWorkItem?.cancel() + pendingTerminateWorkItem = nil + delegate?.whipServerOnPublishStart(streamKey: streamKey) + case .closed, .failed, .disconnected: + // Some WHIP clients (e.g. ffmpeg) send 0 candidates in the initial offer and then trickle via PATCH. + // libdatachannel may temporarily report a failed/disconnected state before remote candidates arrive. + // Give it a short grace period before tearing down the session. + if didConnect { + terminate(reason: "\(connectionState)") + } else { + pendingTerminateWorkItem?.cancel() + let work = DispatchWorkItem { [weak self] in + guard let self else { return } + self.terminate(reason: "\(connectionState)") + } + pendingTerminateWorkItem = work + whipServerDispatchQueue.asyncAfter(deadline: .now() + 3.0, execute: work) + } + default: + break + } + } + + func peerConnection(_ peerConnection: RTCPeerConnection, iceGatheringStateChanged iceGatheringState: RTCPeerConnection.IceGatheringState) {} + func peerConnection(_ peerConnection: RTCPeerConnection, iceConnectionStateChanged iceConnectionState: RTCPeerConnection.IceConnectionState) {} + func peerConnection(_ peerConnection: RTCPeerConnection, signalingStateChanged signalingState: RTCPeerConnection.SignalingState) {} + func peerConnection(_ peerConneciton: RTCPeerConnection, didOpen dataChannel: RTCDataChannel) {} + func peerConnection(_ peerConnection: RTCPeerConnection, gotIceCandidate candidated: RTCIceCandidate) { + let line = candidated.candidate.trimmingCharacters(in: .whitespacesAndNewlines) + guard !line.isEmpty else { + return + } + let lower = line.lowercased() + if lower.contains(" tcp ") || lower.contains(" fe80:") { + return + } + logger.info("whip-server: \(streamKey) local-candidate mid=\(candidated.mid) \(line)") + localCandidatesLock.lock() + localCandidates.append(candidated) + localCandidatesLock.unlock() + } + + func getLocalCandidates() -> [RTCIceCandidate] { + localCandidatesLock.lock() + defer { 
localCandidatesLock.unlock() } + return localCandidates + } + + private func terminate(reason: String) { + guard !terminated else { + return + } + terminated = true + delegate?.whipServerOnPublishStop(streamKey: streamKey, reason: reason) + onTerminated() + } +} + +final class WhipServer { + weak var delegate: (any WhipServerDelegate)? + var settings: SettingsWhipServer + + private let httpServer: HttpServer + private var sessionsByStreamKey: [String: WhipServerSession] = [:] + + init(settings: SettingsWhipServer) { + self.settings = settings + httpServer = HttpServer(queue: whipServerDispatchQueue, routes: []) + rebuildRoutes() + } + + func start() { + rebuildRoutes() + httpServer.start(port: NWEndpoint.Port(rawValue: settings.port) ?? .http) + } + + func stop() { + whipServerDispatchQueue.async { + self.httpServer.stop() + for (_, session) in self.sessionsByStreamKey { + session.close(reason: "Server stop") + } + self.sessionsByStreamKey.removeAll() + } + } + + func isStreamConnected(streamKey: String) -> Bool { + whipServerDispatchQueue.sync { + sessionsByStreamKey[streamKey] != nil + } + } + + private func rebuildRoutes() { + var routes: [HttpServerRoute] = [] + for stream in settings.streams { + let path = "/whip/\(stream.streamKey)" + routes.append(HttpServerRoute(path: path) { [weak self] request, response in + self?.handleRequest(stream: stream, request: request, response: response) + }) + } + httpServer.setRoutes(routes) + } + + private func handleRequest(stream: SettingsWhipServerStream, request: HttpServerRequest, response: HttpServerResponse) { + switch request.method.uppercased() { + case "POST": + handlePost(stream: stream, request: request, response: response) + case "PATCH": + handlePatch(stream: stream, request: request, response: response) + case "DELETE": + handleDelete(stream: stream, response: response) + default: + response.send(text: "", status: .methodNotAllowed) + } + } + + private func handleDelete(stream: SettingsWhipServerStream, 
response: HttpServerResponse) { + whipServerDispatchQueue.async { + if let session = self.sessionsByStreamKey[stream.streamKey] { + session.close(reason: "Client delete") + self.sessionsByStreamKey[stream.streamKey] = nil + } + response.send(text: "", status: .ok) + } + } + + private func handlePatch(stream: SettingsWhipServerStream, request: HttpServerRequest, response: HttpServerResponse) { + // Trickle ICE: application/trickle-ice-sdpfrag (RFC 8840) + whipServerDispatchQueue.async { + guard let session = self.sessionsByStreamKey[stream.streamKey] else { + response.send(text: "", status: .notFound) + return + } + guard let frag = String(data: request.body, encoding: .utf8), !frag.isEmpty else { + response.send(text: "", status: .badRequest) + return + } + do { + let (candidates, mid) = Self.parseTrickleIceSdpFrag(frag) + logger.info("whip-server: patch streamKey=\(stream.streamKey) mid=\(mid ?? "-") candidates=\(candidates.count)") + for candidate in candidates { + try session.peerConnection.addRemoteCandidate(candidate, mid: mid) + } + response.send(text: "", status: .noContent) + } catch { + logger.info("whip-server: patch error: \(error)") + response.send(text: "", status: .badRequest) + } + } + } + + private func handlePost(stream: SettingsWhipServerStream, request: HttpServerRequest, response: HttpServerResponse) { + if let contentType = request.header("content-type"), !contentType.hasPrefix("application/sdp") { + response.send(text: "", status: .unsupportedMediaType) + return + } + guard let offer = String(data: request.body, encoding: .utf8), !offer.isEmpty else { + response.send(text: "", status: .badRequest) + return + } + whipServerDispatchQueue.async { + if let existing = self.sessionsByStreamKey[stream.streamKey] { + existing.close(reason: "Replaced by new publisher") + self.sessionsByStreamKey[stream.streamKey] = nil + } + Task { + do { + let (sanitizedOffer, removedCandidates) = Self.sanitizeOfferSdp(offer) + logger.info( + "whip-server: received 
offer for \(stream.streamKey) (\(offer.count) bytes), " + + "candidates=\(Self.countCandidates(offer)), removedCandidates=\(removedCandidates)" + ) + let latency = min(Double(stream.latency) / 1000.0, 0.5) + + // --- Video path (RTMP-style): compressed RTP → retime PTS → VideoDecoder → BufferedVideo --- + let videoDecoder = WhipServerVideoDecoder( + cameraId: stream.id, + delegate: self.delegate, + latency: latency + ) + videoDecoder.start() + + // --- Audio path: RTCTrack → IncomingStream/AudioCodec (Opus→PCM) → WhipServerAudioOutput --- + let rtcStream = RTCStream() + await rtcStream.setDirection(.recvonly) + await rtcStream.addOutput(WhipServerAudioOutput( + cameraId: stream.id, + delegate: self.delegate, + latency: latency + )) + + let peerConnection = try RTCPeerConnection(RTCConfiguration()) + // Set onCompressedVideo BEFORE setRemoteDescription so that when + // libdatachannel fires the track callback, video tracks are routed + // to our VideoDecoder. Audio tracks go to incomingStream (RTCStream). + peerConnection.onCompressedVideo = { [weak videoDecoder] buffer in + videoDecoder?.handleCompressedVideo(buffer) + } + peerConnection.attachIncomingStream(rtcStream) + let session = WhipServerSession( + streamKey: stream.streamKey, + cameraId: stream.id, + peerConnection: peerConnection, + stream: rtcStream, + videoDecoder: videoDecoder, + delegate: self.delegate, + onTerminated: { [weak self] in + whipServerDispatchQueue.async { + self?.sessionsByStreamKey[stream.streamKey] = nil + } + } + ) + + try peerConnection.setRemoteDesciption(sanitizedOffer, type: .offer) + let answer = await self.waitForLocalDescription(peerConnection: peerConnection, timeoutSeconds: 2.0) + await self.waitForIceGatheringComplete(peerConnection: peerConnection, timeoutSeconds: 5.0) + + // libdatachannel may not embed gathered candidates into the local SDP string + // even when gathering is complete (trickle-only behavior). 
WHIP endpoints must + // include their ICE candidates in the SDP answer, so we inject candidates that + // arrive via the local-candidate callback. + let baseAnswer = peerConnection.localDescriptionSdp.isEmpty ? answer : peerConnection.localDescriptionSdp + let injected = Self.injectCandidatesIntoAnswerSdp( + baseAnswer, + candidates: session.getLocalCandidates() + ) + let finalAnswer = injected + guard !finalAnswer.isEmpty else { + throw RTCError.notAvail + } + logger.info( + "whip-server: generated answer for \(stream.streamKey) (\(finalAnswer.count) bytes), " + + "iceGathering=\(peerConnection.iceGatheringState), candidates=\(Self.countCandidates(finalAnswer))" + ) + + whipServerDispatchQueue.async { + self.sessionsByStreamKey[stream.streamKey] = session + let path = "/whip/\(stream.streamKey)" + let location: String + if let host = request.header("host"), !host.isEmpty { + location = "http://\(host)\(path)" + } else { + location = path + } + response.send( + text: finalAnswer, + status: .created, + contentType: "application/sdp", + headers: [("Location", location)] + ) + } + } catch { + logger.info("whip-server: \(error)") + response.send(text: "", status: .internalServerError) + } + } + } + } + + private func waitForIceGatheringComplete(peerConnection: RTCPeerConnection, timeoutSeconds: Double) async { + let deadline = Date().addingTimeInterval(timeoutSeconds) + while peerConnection.iceGatheringState != .complete && Date() < deadline { + try? await Task.sleep(for: .milliseconds(50)) + } + } + + private func waitForLocalDescription(peerConnection: RTCPeerConnection, timeoutSeconds: Double) async -> String { + let deadline = Date().addingTimeInterval(timeoutSeconds) + while peerConnection.localDescriptionSdp.isEmpty && Date() < deadline { + try? await Task.sleep(for: .milliseconds(25)) + } + return peerConnection.localDescriptionSdp + } + + private static func parseTrickleIceSdpFrag(_ frag: String) -> (candidates: [String], mid: String?) 
{ + var candidates: [String] = [] + var mid: String? + for rawLine in frag.split(separator: "\n", omittingEmptySubsequences: false) { + let line = rawLine.trimmingCharacters(in: .whitespacesAndNewlines) + if line.hasPrefix("a=mid:") { + mid = String(line.dropFirst("a=mid:".count)) + } else if line.hasPrefix("a=candidate:") || line.hasPrefix("candidate:") { + // Heuristic: libdatachannel often can't send to IPv6 link-local candidates (missing scope), + // and TCP candidates are not useful for our LAN ingest use-case. + let lower = line.lowercased() + if lower.contains(" tcp ") || lower.contains(" fe80:") { + continue + } + candidates.append(line) + } + } + return (candidates, mid) + } + + private static func countCandidates(_ sdp: String) -> Int { + return sdp.split(separator: "\n").filter { + let line = $0.trimmingCharacters(in: .whitespacesAndNewlines) + return line.hasPrefix("a=candidate:") || line.hasPrefix("candidate:") + }.count + } + + private static func sanitizeOfferSdp(_ offer: String) -> (sdp: String, removedCandidates: Int) { + var removed = 0 + let lines = offer.split(separator: "\n", omittingEmptySubsequences: false).map(String.init) + let kept: [String] = lines.filter { line in + let trimmed = line.trimmingCharacters(in: .whitespacesAndNewlines) + guard trimmed.hasPrefix("a=candidate:") else { + return true + } + let lower = trimmed.lowercased() + if lower.contains(" tcp ") || lower.contains(" fe80:") { + removed += 1 + return false + } + return true + } + return (kept.joined(separator: "\n"), removed) + } + + private static func injectCandidatesIntoAnswerSdp(_ sdp: String, candidates: [RTCIceCandidate]) -> String { + guard !sdp.isEmpty, !candidates.isEmpty else { + return sdp + } + // If SDP already contains candidates, keep it (avoid duplicates). 
+ if countCandidates(sdp) > 0 { + return sdp + } + + var lines = sdp.split(separator: "\n", omittingEmptySubsequences: false).map(String.init) + let mediaStarts = lines.indices.filter { lines[$0].trimmingCharacters(in: .whitespacesAndNewlines).hasPrefix("m=") } + guard let firstMediaStart = mediaStarts.first else { + // No media sections; just append at end best-effort. + var appended = lines + for c in candidates { + let l = normalizeCandidateLine(c.candidate) + if !l.isEmpty { appended.append(l) } + } + appended.append("a=end-of-candidates") + return appended.joined(separator: "\n") + } + + // Map mid -> insertion section (start index of that m= section). + var sectionByMid: [String: Int] = [:] + for i in 0..) + for sectionStart in sortedSections { + guard let insertLines = candidatesBySection[sectionStart], !insertLines.isEmpty else { continue } + + let sectionIndex = mediaStarts.firstIndex(of: sectionStart) ?? 0 + let sectionEnd = (sectionIndex + 1 < mediaStarts.count) ? mediaStarts[sectionIndex + 1] : lines.count + + // Insert near end of section, before next m=. + var insertAt = sectionEnd + // Keep end-of-candidates inside section. + let alreadyHasEnd = lines[sectionStart..= 0 && lines[$0].trimmingCharacters(in: .whitespacesAndNewlines) == "a=end-of-candidates" + }) { + insertAt = idx + } + } + + lines.insert(contentsOf: insertLines, at: insertAt) + if !alreadyHasEnd { + lines.insert("a=end-of-candidates", at: insertAt + insertLines.count) + } + } + + return lines.joined(separator: "\n") + } + + private static func normalizeCandidateLine(_ raw: String) -> String { + let trimmed = raw.trimmingCharacters(in: .whitespacesAndNewlines) + if trimmed.isEmpty { return "" } + if trimmed.hasPrefix("a=candidate:") { return trimmed } + if trimmed.hasPrefix("candidate:") { return "a=\(trimmed)" } + if trimmed.contains("candidate:") { + // Best effort: ensure it's an SDP attribute. + return trimmed.hasPrefix("a=") ? 
trimmed : "a=\(trimmed)" + } + return "" + } +} + diff --git a/Moblin/Moblin.entitlements b/Moblin/Moblin.entitlements index 8ed677a12..ccb07d2f4 100644 --- a/Moblin/Moblin.entitlements +++ b/Moblin/Moblin.entitlements @@ -2,26 +2,26 @@ - com.apple.developer.healthkit - - com.apple.developer.healthkit.access - - com.apple.developer.networking.wifi-info - - com.apple.developer.siri - - com.apple.developer.weatherkit - - com.apple.developer.wifi-aware - - Subscribe - Publish - - com.apple.external-accessory.wireless-configuration - - com.apple.security.application-groups - - group.com.eerimoq.Moblin - + com.apple.developer.healthkit + + com.apple.developer.healthkit.access + + com.apple.developer.networking.wifi-info + + com.apple.developer.siri + + com.apple.developer.weatherkit + + com.apple.developer.wifi-aware + + Subscribe + Publish + + com.apple.external-accessory.wireless-configuration + + com.apple.security.application-groups + + group.io.wemo.mocamapp + diff --git a/Moblin/MoblinApp.swift b/Moblin/MoblinApp.swift index 88edddad2..64db6bcd3 100644 --- a/Moblin/MoblinApp.swift +++ b/Moblin/MoblinApp.swift @@ -1,5 +1,8 @@ import SwiftUI +import HaishinKit +import RTCHaishinKit + @main struct MoblinApp: App { @UIApplicationDelegateAdaptor(AppDelegate.self) var appDelegate @@ -9,6 +12,9 @@ struct MoblinApp: App { init() { MoblinApp.globalModel = Model() _model = StateObject(wrappedValue: MoblinApp.globalModel!) + Task { + await SessionBuilderFactory.shared.register(HTTPSessionFactory()) + } } var body: some Scene { @@ -113,7 +119,8 @@ class AppDelegate: NSObject, UIApplicationDelegate { } func application(_: UIApplication, - supportedInterfaceOrientationsFor _: UIWindow?) -> UIInterfaceOrientationMask + supportedInterfaceOrientationsFor _: UIWindow?) 
+ -> UIInterfaceOrientationMask { return AppDelegate.orientationLock } diff --git a/Moblin/RemoteControl/RemoteControl.swift b/Moblin/RemoteControl/RemoteControl.swift index 59698f9a9..37be4184e 100644 --- a/Moblin/RemoteControl/RemoteControl.swift +++ b/Moblin/RemoteControl/RemoteControl.swift @@ -223,7 +223,7 @@ struct RemoteControlRemoteSceneSettingsWidgetTypeBrowser: Codable { var url: String var width: Int var height: Int - var mode: SettingsWidgetBrowserMode + var audioAndVideoOnly: Bool var fps: Float var styleSheet: String @@ -231,7 +231,7 @@ struct RemoteControlRemoteSceneSettingsWidgetTypeBrowser: Codable { url = browser.url width = browser.width height = browser.height - mode = browser.mode + audioAndVideoOnly = browser.audioAndVideoOnly fps = browser.baseFps styleSheet = browser.styleSheet } @@ -241,7 +241,7 @@ struct RemoteControlRemoteSceneSettingsWidgetTypeBrowser: Codable { browser.url = url browser.width = width browser.height = height - browser.mode = mode + browser.audioAndVideoOnly = audioAndVideoOnly browser.baseFps = fps browser.styleSheet = styleSheet return browser diff --git a/Moblin/RemoteControl/Web/js/index.mjs b/Moblin/RemoteControl/Web/js/index.mjs index b6de90a2a..747fa3fd1 100644 --- a/Moblin/RemoteControl/Web/js/index.mjs +++ b/Moblin/RemoteControl/Web/js/index.mjs @@ -76,7 +76,7 @@ class Connection { } setMuted(on) { - this.sendRequest({ + this.send({ setMute: { on: on, }, @@ -84,7 +84,7 @@ class Connection { } setDebugLogging(on) { - this.sendRequest({ + this.send({ setDebugLogging: { on: on, }, diff --git a/Moblin/StreamingPlatforms/YouTube/YouTubeApi.swift b/Moblin/StreamingPlatforms/YouTube/YouTubeApi.swift index 08fdea8f8..8a94066f1 100644 --- a/Moblin/StreamingPlatforms/YouTube/YouTubeApi.swift +++ b/Moblin/StreamingPlatforms/YouTube/YouTubeApi.swift @@ -16,14 +16,9 @@ struct YouTubeApiLiveBroadcastSnippet: Codable { struct YouTubeApiLiveBroadcastStatus: Codable { let privacyStatus: String - - func visibility() -> 
YouTubeApiLiveBroadcaseVisibility? { - return YouTubeApiLiveBroadcaseVisibility(rawValue: privacyStatus) - } } struct YouTubeApiLiveBroadcastContentDetails: Codable { - let enableAutoStart: Bool let enableAutoStop: Bool } @@ -74,13 +69,7 @@ enum YouTubeApiLiveBroadcaseVisibility: String, Codable, CaseIterable { } struct YouTubeApiListVideoStreamingDetails: Codable { - let concurrentViewers: String? - let actualStartTime: String? - let actualEndTime: String? - - func isLive() -> Bool { - return actualStartTime != nil && actualEndTime == nil - } + let concurrentViewers: String } struct YouTubeApiListVideo: Codable { diff --git a/Moblin/Various/Media.swift b/Moblin/Various/Media.swift index 756d0f37a..2b83e2c7b 100644 --- a/Moblin/Various/Media.swift +++ b/Moblin/Various/Media.swift @@ -23,6 +23,8 @@ protocol MediaDelegate: AnyObject { func mediaOnRtmpDestinationDisconnected(_ destination: String) func mediaOnRistConnected() func mediaOnRistDisconnected() + func mediaOnWhipConnected() + func mediaOnWhipDisconnected(_ reason: String) func mediaOnAudioMuteChange() func mediaOnAudioBuffer(_ sampleBuffer: CMSampleBuffer) func mediaOnLowFpsImage(_ lowFpsImage: Data?, _ frameNumber: UInt64) @@ -53,6 +55,7 @@ final class Media: NSObject { private var srtStreamNew: SrtStreamMoblin? private var srtStreamOld: SrtStreamOfficial? private var ristStream: RistStream? + private var whipStream: WhipStream? private var srtlaClient: SrtlaClient? private var processor: Processor? 
private var srtTotalByteCount: Int64 = 0 @@ -101,10 +104,12 @@ final class Media: NSObject { srtStopStream() rtmpStopStream() ristStopStream() + whipStopStream() rtmpStreams.removeAll() srtStreamNew = nil srtStreamOld = nil ristStream = nil + whipStream = nil processor = nil } @@ -120,6 +125,7 @@ final class Media: NSObject { srtStopStream() rtmpStopStream() ristStopStream() + whipStopStream() let processor = Processor() switch proto { case .rtmp: @@ -138,6 +144,7 @@ final class Media: NSObject { srtStreamNew = nil srtStreamOld = nil ristStream = nil + whipStream = nil case .srt: switch srtImplementation { case .moblin: @@ -157,11 +164,19 @@ final class Media: NSObject { } rtmpStreams.removeAll() ristStream = nil + whipStream = nil case .rist: ristStream = RistStream(processor: processor, timecodesEnabled: timecodesEnabled, delegate: self) srtStreamNew = nil srtStreamOld = nil rtmpStreams.removeAll() + whipStream = nil + case .whip: + whipStream = WhipStream(processor: processor, delegate: self) + srtStreamNew = nil + srtStreamOld = nil + rtmpStreams.removeAll() + ristStream = nil } self.processor = processor processor.setDelegate(delegate: self) @@ -607,6 +622,14 @@ final class Media: NSObject { ristStream?.stop() } + func whipStartStream(endpointUrl: URL, settings: SettingsStreamWhip, videoDimensions: CMVideoDimensions) { + whipStream?.start(endpointUrl: endpointUrl, settings: settings, videoDimensions: videoDimensions) + } + + func whipStopStream() { + whipStream?.stop() + } + func setTorch(on: Bool) { processor?.setTorch(value: on) } @@ -1129,6 +1152,16 @@ extension Media: RistStreamDelegate { } } +extension Media: WhipStreamDelegate { + func whipStreamOnConnected() { + delegate?.mediaOnWhipConnected() + } + + func whipStreamOnDisconnected(reason: String) { + delegate?.mediaOnWhipDisconnected(reason) + } +} + extension Media: SrtStreamMoblinDelegate { func srtStreamMoblinConnected() { DispatchQueue.main.async { diff --git 
a/Moblin/Various/MoblinSettingsUrl.swift b/Moblin/Various/MoblinSettingsUrl.swift index 85b7cbfa9..c881d46cf 100644 --- a/Moblin/Various/MoblinSettingsUrl.swift +++ b/Moblin/Various/MoblinSettingsUrl.swift @@ -71,7 +71,6 @@ class MoblinSettingsUrlStream: Codable { class MoblinSettingsButton: Codable { var type: SettingsQuickButtonType var enabled: Bool? - var page: Int? init(type: SettingsQuickButtonType) { self.type = type diff --git a/Moblin/Various/Model/Model.swift b/Moblin/Various/Model/Model.swift index 9c5a983f9..5d76a27fa 100644 --- a/Moblin/Various/Model/Model.swift +++ b/Moblin/Various/Model/Model.swift @@ -146,9 +146,11 @@ class Raid: ObservableObject { class Ingests: ObservableObject { var rtmp: RtmpServer? + var whip: WhipServer? var srtla: SrtlaServer? var rist: RistServer? var rtsp: [RtspClient] = [] + var whep: [WhepClient] = [] @Published var speedAndTotal = noValue } @@ -192,16 +194,10 @@ struct StreamingPlatformStatus: Equatable { let status: PlatformStatus } -struct ChatPlatformStatus: Equatable { - let platform: Platform - let connected: Bool -} - class StatusTopLeft: ObservableObject { @Published var numberOfViewersIconColor: Color = .white @Published var numberOfViewersCompact = noValue @Published var streamingPlatformStatuses: [StreamingPlatformStatus] = [] - @Published var chatPlatformStatuses: [ChatPlatformStatus] = [] @Published var statusEventsText = noValue @Published var statusChatText = noValue @Published var streamText = noValue @@ -1062,9 +1058,11 @@ final class Model: NSObject, ObservableObject, @unchecked Sendable { object: nil) updateOrientation() reloadRtmpServer() + reloadWhipServer() reloadSrtlaServer() reloadRistServer() reloadRtspClient() + reloadWhepClient() ipMonitor.pathUpdateHandler = handleIpStatusUpdate ipMonitor.start() NotificationCenter.default.addObserver(self, @@ -1408,6 +1406,7 @@ final class Model: NSObject, ObservableObject, @unchecked Sendable { reloadSrtlaServer() reloadRistServer() reloadRtspClient() + 
reloadWhepClient() chatTextToSpeech.reset(running: true) startWeatherManager() startGeographyManager() diff --git a/Moblin/Various/Model/ModelAppleWatch.swift b/Moblin/Various/Model/ModelAppleWatch.swift index 489bc9937..9dbf8a9fd 100644 --- a/Moblin/Various/Model/ModelAppleWatch.swift +++ b/Moblin/Various/Model/ModelAppleWatch.swift @@ -46,22 +46,24 @@ extension Model { sendIsMutedToWatch(isMuteOn: isMuteOn) sendViewerCountWatch() sendScoreboardPlayersToWatch() - if let widget = getEnabledScoreboardWidgetsInSelectedScene() - .filter({ $0.scoreboard.sport == .padel || $0.scoreboard.sport == .generic }) - .first - { - let scoreboard = widget.scoreboard - switch scoreboard.sport { - case .padel: - sendUpdatePadelScoreboardToWatch(id: widget.id, padel: scoreboard.padel) - case .generic: - sendUpdateGenericScoreboardToWatch(id: widget.id, generic: scoreboard.generic) - default: - break - } + let sceneWidgets: [SettingsWidget] + if let scene = getSelectedScene() { + sceneWidgets = getSceneWidgets(scene: scene, onlyEnabled: true).map { $0.widget } } else { - for widgetId in scoreboardEffects.keys { - sendRemoveScoreboardToWatch(id: widgetId) + sceneWidgets = [] + } + for id in scoreboardEffects.keys { + if let scoreboard = sceneWidgets.first(where: { $0.id == id })?.scoreboard { + switch scoreboard.sport { + case .padel: + sendUpdatePadelScoreboardToWatch(id: id, padel: scoreboard.padel) + case .generic: + sendUpdateGenericScoreboardToWatch(id: id, generic: scoreboard.generic) + default: + break + } + } else { + sendRemoveScoreboardToWatch(id: id) } } } @@ -405,6 +407,57 @@ extension Model { func isWatchLocal() -> Bool { return !isWatchRemoteControl() } + + func updateScoreboardEffects() { + let sceneWidgets: [SettingsWidget] + if let scene = getSelectedScene() { + sceneWidgets = getSceneWidgets(scene: scene, onlyEnabled: true).map { $0.widget } + } else { + sceneWidgets = [] + } + for (id, scoreboardEffect) in scoreboardEffects { + guard let scoreboard = 
sceneWidgets.first(where: { $0.id == id })?.scoreboard else { + continue + } + switch scoreboard.sport { + case .padel: + break + case .generic: + guard let widget = findWidget(id: id) else { + continue + } + guard !widget.scoreboard.generic.clock.isStopped else { + continue + } + widget.scoreboard.generic.clock.tick() + DispatchQueue.main.async { + scoreboardEffect.update( + scoreboard: widget.scoreboard, + config: self.getCurrentConfig(), + players: self.database.scoreboardPlayers + ) + } + sendUpdateGenericScoreboardToWatch(id: id, generic: scoreboard.generic) + default: + guard let widget = findWidget(id: id) else { + continue + } + guard !widget.scoreboard.modular.clock.isStopped else { + continue + } + widget.scoreboard.modular.clock.tick() + DispatchQueue.main.async { + scoreboardEffect.update( + scoreboard: widget.scoreboard, + config: self.getCurrentConfig(), + players: self.database.scoreboardPlayers + ) + self.remoteControlScoreboardUpdate() + } + sendUpdateGenericScoreboardToWatch(id: id, generic: scoreboard.generic) + } + } + } } extension Model: WCSessionDelegate { @@ -619,7 +672,98 @@ extension Model: WCSessionDelegate { guard self.isWatchLocal() else { return } - self.handleUpdatePadelScoreboard(action: action) + guard let widget = self.findWidget(id: action.id) else { + return + } + switch action.action { + case .reset: + self.handleUpdatePadelScoreboardReset(scoreboard: widget.scoreboard.padel) + case .undo: + self.handleUpdatePadelScoreboardUndo(scoreboard: widget.scoreboard.padel) + case .incrementHome: + self.handleUpdatePadelScoreboardIncrementHome(scoreboard: widget.scoreboard.padel) + case .incrementAway: + self.handleUpdatePadelScoreboardIncrementAway(scoreboard: widget.scoreboard.padel) + case let .players(players): + self.handleUpdatePadelScoreboardChangePlayers(scoreboard: widget.scoreboard.padel, + players: players) + } + guard let scoreboardEffect = self.scoreboardEffects[action.id] else { + return + } + 
scoreboardEffect.update(scoreboard: widget.scoreboard, + config: self.getCurrentConfig(), + players: self.database.scoreboardPlayers) + self.sendUpdatePadelScoreboardToWatch(id: action.id, padel: widget.scoreboard.padel) + } + } + + private func handleUpdatePadelScoreboardReset(scoreboard: SettingsWidgetPadelScoreboard) { + scoreboard.score = [.init()] + scoreboard.scoreChanges.removeAll() + } + + private func handleUpdatePadelScoreboardUndo(scoreboard: SettingsWidgetPadelScoreboard) { + guard let team = scoreboard.scoreChanges.popLast() else { + return + } + guard let score = scoreboard.score.last else { + return + } + if score.home == 0, score.away == 0, scoreboard.score.count > 1 { + scoreboard.score.removeLast() + } + let index = scoreboard.score.count - 1 + switch team { + case .home: + if scoreboard.score[index].home > 0 { + scoreboard.score[index].home -= 1 + } + case .away: + if scoreboard.score[index].away > 0 { + scoreboard.score[index].away -= 1 + } + } + } + + private func handleUpdatePadelScoreboardIncrementHome(scoreboard: SettingsWidgetPadelScoreboard) { + if !isCurrentSetCompleted(scoreboard: scoreboard) { + guard !isMatchCompleted(scoreboard: scoreboard) else { + return + } + scoreboard.score[scoreboard.score.count - 1].home += 1 + scoreboard.scoreChanges.append(.home) + } else { + padelScoreboardUpdateSetCompleted(scoreboard: scoreboard) + } + } + + private func handleUpdatePadelScoreboardIncrementAway(scoreboard: SettingsWidgetPadelScoreboard) { + if !isCurrentSetCompleted(scoreboard: scoreboard) { + guard !isMatchCompleted(scoreboard: scoreboard) else { + return + } + scoreboard.score[scoreboard.score.count - 1].away += 1 + scoreboard.scoreChanges.append(.away) + } else { + padelScoreboardUpdateSetCompleted(scoreboard: scoreboard) + } + } + + private func handleUpdatePadelScoreboardChangePlayers(scoreboard: SettingsWidgetPadelScoreboard, + players: WatchProtocolPadelScoreboardActionPlayers) + { + if players.home.count > 0 { + 
scoreboard.homePlayer1 = players.home[0] + if players.home.count > 1 { + scoreboard.homePlayer2 = players.home[1] + } + } + if players.away.count > 0 { + scoreboard.awayPlayer1 = players.away[0] + if players.away.count > 1 { + scoreboard.awayPlayer2 = players.away[1] + } } } @@ -635,8 +779,137 @@ extension Model: WCSessionDelegate { guard self.isWatchLocal() else { return } - self.handleUpdateGenericScoreboard(action: action) + guard let widget = self.findWidget(id: action.id) else { + return + } + switch action.action { + case .reset: + self.handleUpdateGenericScoreboardReset(scoreboard: widget.scoreboard.generic) + case .undo: + self.handleUpdateGenericScoreboardUndo(scoreboard: widget.scoreboard.generic) + case .incrementHome: + self.handleUpdateGenericScoreboardIncrementHome(scoreboard: widget.scoreboard.generic) + case .incrementAway: + self.handleUpdateGenericScoreboardIncrementAway(scoreboard: widget.scoreboard.generic) + case let .setTitle(title): + self.handleUpdateGenericScoreboardSetTitle( + scoreboard: widget.scoreboard.generic, + title: title + ) + case let .setClock(minutes, seconds): + self.handleUpdateGenericScoreboardSetClock(scoreboard: widget.scoreboard.generic, + minutes: minutes, + seconds: seconds) + case let .setClockState(stopped: stopped): + self.handleUpdateGenericScoreboardSetClockState(scoreboard: widget.scoreboard.generic, + stopped: stopped) + } + guard let scoreboardEffect = self.scoreboardEffects[action.id] else { + return + } + scoreboardEffect.update(scoreboard: widget.scoreboard, + config: self.getCurrentConfig(), + players: self.database.scoreboardPlayers) + self.sendUpdateGenericScoreboardToWatch(id: action.id, generic: widget.scoreboard.generic) + } + } + + private func handleUpdateGenericScoreboardReset(scoreboard: SettingsWidgetGenericScoreboard) { + scoreboard.score.home = 0 + scoreboard.score.away = 0 + scoreboard.scoreChanges.removeAll() + } + + private func handleUpdateGenericScoreboardUndo(scoreboard: 
SettingsWidgetGenericScoreboard) { + guard let team = scoreboard.scoreChanges.popLast() else { + return + } + switch team { + case .home: + if scoreboard.score.home > 0 { + scoreboard.score.home -= 1 + } + case .away: + if scoreboard.score.away > 0 { + scoreboard.score.away -= 1 + } + } + } + + private func handleUpdateGenericScoreboardIncrementHome(scoreboard: SettingsWidgetGenericScoreboard) { + scoreboard.score.home += 1 + scoreboard.scoreChanges.append(.home) + } + + private func handleUpdateGenericScoreboardIncrementAway(scoreboard: SettingsWidgetGenericScoreboard) { + scoreboard.score.away += 1 + scoreboard.scoreChanges.append(.away) + } + + private func handleUpdateGenericScoreboardSetTitle(scoreboard: SettingsWidgetGenericScoreboard, + title: String) + { + scoreboard.title = title + } + + private func handleUpdateGenericScoreboardSetClock(scoreboard: SettingsWidgetGenericScoreboard, + minutes: Int, + seconds: Int) + { + scoreboard.clock.minutes = minutes.clamped(to: 0 ... scoreboard.clock.maximum) + if scoreboard.clock.minutes == scoreboard.clock.maximum { + scoreboard.clock.seconds = 0 + } else { + scoreboard.clock.seconds = seconds.clamped(to: 0 ... 
59) + } + } + + private func handleUpdateGenericScoreboardSetClockState(scoreboard: SettingsWidgetGenericScoreboard, + stopped: Bool) + { + scoreboard.clock.isStopped = stopped + } + + private func padelScoreboardUpdateSetCompleted(scoreboard: SettingsWidgetPadelScoreboard) { + guard let score = scoreboard.score.last else { + return + } + guard isSetCompleted(score: score) else { + return + } + guard !isMatchCompleted(scoreboard: scoreboard) else { + return + } + scoreboard.score.append(.init()) + } + + private func isCurrentSetCompleted(scoreboard: SettingsWidgetPadelScoreboard) -> Bool { + guard let score = scoreboard.score.last else { + return false + } + return isSetCompleted(score: score) + } + + private func isSetCompleted(score: SettingsWidgetScoreboardScore) -> Bool { + let maxScore = max(score.home, score.away) + let minScore = min(score.home, score.away) + if maxScore == 6 && minScore <= 4 { + return true + } + if maxScore == 7 { + return true + } + return false + } + + private func isMatchCompleted(scoreboard: SettingsWidgetPadelScoreboard) -> Bool { + if scoreboard.score.count < 5 { + return false + } + guard let score = scoreboard.score.last else { + return false } + return isSetCompleted(score: score) } private func handleCreateStreamMarker() { diff --git a/Moblin/Various/Model/ModelCamera.swift b/Moblin/Various/Model/ModelCamera.swift index 6bc5792f4..f10b38b1d 100644 --- a/Moblin/Various/Model/ModelCamera.swift +++ b/Moblin/Various/Model/ModelCamera.swift @@ -560,6 +560,9 @@ extension Model { cameras += rtmpCameras().map { ($0.0.uuidString, $0.1) } + cameras += whipCameras().map { + ($0.0.uuidString, $0.1) + } cameras += srtlaCameras().map { ($0.0.uuidString, $0.1) } @@ -569,6 +572,9 @@ extension Model { cameras += rtspCameras().map { ($0.0.uuidString, $0.1) } + cameras += whepCameras().map { + ($0.0.uuidString, $0.1) + } cameras += playerCameras().map { ($0.0.uuidString, $0.1) } @@ -618,10 +624,14 @@ extension Model { return .srtla(id: id) } else 
if let id = getRtmpStream(idString: cameraId)?.id { return .rtmp(id: id) + } else if let id = getWhipStream(idString: cameraId)?.id { + return .whip(id: id) } else if let id = getRistStream(idString: cameraId)?.id { return .rist(id: id) } else if let id = getRtspStream(idString: cameraId)?.id { return .rtsp(id: id) + } else if let id = getWhepStream(idString: cameraId)?.id { + return .whep(id: id) } else if let id = getMediaPlayer(idString: cameraId)?.id { return .mediaPlayer(id: id) } else if isBackCamera(cameraId: cameraId) { @@ -650,12 +660,16 @@ extension Model { switch settingsCameraId { case let .rtmp(id): return id.uuidString + case let .whip(id): + return id.uuidString case let .srtla(id): return id.uuidString case let .rist(id: id): return id.uuidString case let .rtsp(id: id): return id.uuidString + case let .whep(id: id): + return id.uuidString case let .mediaPlayer(id): return id.uuidString case let .external(id, _): @@ -700,12 +714,16 @@ extension Model { switch settingsCameraId { case let .rtmp(id): return getRtmpStream(id: id)?.camera() ?? unknownSad + case let .whip(id): + return getWhipStream(id: id)?.camera() ?? unknownSad case let .srtla(id): return getSrtlaStream(id: id)?.camera() ?? unknownSad case let .rist(id): return getRistStream(id: id)?.camera() ?? unknownSad case let .rtsp(id): return getRtspStream(id: id)?.camera() ?? unknownSad + case let .whep(id): + return getWhepStream(id: id)?.camera() ?? unknownSad case let .mediaPlayer(id): return getMediaPlayer(id: id)?.camera() ?? 
unknownSad case let .external(_, name): @@ -809,12 +827,16 @@ extension Model { switch cameraId { case let .rtmp(id: id): return id + case let .whip(id: id): + return id case let .srtla(id: id): return id case let .rist(id: id): return id case let .rtsp(id: id): return id + case let .whep(id: id): + return id case let .mediaPlayer(id: id): return id case .screenCapture: diff --git a/Moblin/Various/Model/ModelChat.swift b/Moblin/Various/Model/ModelChat.swift index d9e68ba29..5ecad75be 100644 --- a/Moblin/Various/Model/ModelChat.swift +++ b/Moblin/Various/Model/ModelChat.swift @@ -426,7 +426,6 @@ extension Model { func updateStatusChatText() { let status: String - var statuses: [ChatPlatformStatus] = [] if !isChatConfigured() { status = String(localized: "Not configured") } else if isRemoteControlChatAndEvents(platform: nil) { @@ -435,39 +434,14 @@ extension Model { } else { status = String(localized: "Disconnected (remote control)") } + } else if isChatConnected() { + status = String(localized: "Connected") } else { - if isTwitchChatConfigured() { - statuses.append(ChatPlatformStatus(platform: .twitch, connected: isTwitchChatConnected())) - } - if isKickPusherConfigured() { - statuses.append(ChatPlatformStatus(platform: .kick, connected: isKickPusherConnected())) - } - if isYouTubeLiveChatConfigured() { - statuses.append(ChatPlatformStatus(platform: .youTube, - connected: isYouTubeLiveChatConnected())) - } - if isSoopChatConfigured() { - statuses.append(ChatPlatformStatus(platform: .soop, connected: isSoopChatConnected())) - } - if isOpenStreamingPlatformChatConfigured() { - statuses.append(ChatPlatformStatus(platform: .openStreamingPlatform, - connected: isOpenStreamingPlatformChatConnected())) - } - if isDLiveChatConfigured() { - statuses.append(ChatPlatformStatus(platform: .dlive, connected: isDLiveChatConnected())) - } - if statuses.allSatisfy({ $0.connected }) { - status = String(localized: "Connected") - } else { - status = String(localized: "Disconnected") - 
} + status = String(localized: "Disconnected") } if status != statusTopLeft.statusChatText { statusTopLeft.statusChatText = status } - if statuses != statusTopLeft.chatPlatformStatuses { - statusTopLeft.chatPlatformStatuses = statuses - } } func printChatMessage(post: ChatPost) { diff --git a/Moblin/Various/Model/ModelRemoteControl.swift b/Moblin/Various/Model/ModelRemoteControl.swift index aef245d2e..2a879e285 100644 --- a/Moblin/Various/Model/ModelRemoteControl.swift +++ b/Moblin/Various/Model/ModelRemoteControl.swift @@ -500,8 +500,8 @@ extension Model { remoteControlWeb?.log(entry: entry) } - func remoteControlScoreboardUpdate(scoreboard: SettingsWidgetScoreboard) { - let config = getModularScoreboardConfig(scoreboard: scoreboard) + func remoteControlScoreboardUpdate() { + let config = getCurrentConfig() remoteControlStreamer?.sendScoreboardUpdate(config: config) remoteControlWeb?.sendScoreboardUpdate(config: config) } @@ -891,8 +891,7 @@ extension Model: RemoteControlAssistantDelegate { extension Model: RemoteControlWebDelegate { func remoteControlWebConnected() { remoteControlWeb?.stateChanged(state: createRemoteControlStateChanged()) - let scoreboard = getEnabledScoreboardWidgetsInSelectedScene().first?.scoreboard - remoteControlWeb?.sendScoreboardUpdate(config: getModularScoreboardConfig(scoreboard: scoreboard)) + remoteControlWeb?.sendScoreboardUpdate(config: getCurrentConfig()) } func remoteControlWebGetStatus() diff --git a/Moblin/Various/Model/ModelScene.swift b/Moblin/Various/Model/ModelScene.swift index 963d9d14e..e9f422dfb 100644 --- a/Moblin/Various/Model/ModelScene.swift +++ b/Moblin/Various/Model/ModelScene.swift @@ -38,7 +38,7 @@ struct WidgetInScene: Identifiable { extension Model { func getTextEffects(id: UUID) -> [TextEffect] { var effects: [TextEffect] = [] - if let effect = textEffects[id] { + if let effect = textEffects.first(where: { $0.key == id })?.value { effects.append(effect) } for slideshow in slideshowEffects.values { @@ -52,39 
+52,39 @@ extension Model { } func getVideoSourceEffect(id: UUID) -> VideoSourceEffect? { - return videoSourceEffects[id] + return videoSourceEffects.first(where: { $0.key == id })?.value } func getVTuberEffect(id: UUID) -> VTuberEffect? { - return vTuberEffects[id] + return vTuberEffects.first(where: { $0.key == id })?.value } func getPngTuberEffect(id: UUID) -> PngTuberEffect? { - return pngTuberEffects[id] + return pngTuberEffects.first(where: { $0.key == id })?.value } func getSnapshotEffect(id: UUID) -> SnapshotEffect? { - return snapshotEffects[id] + return snapshotEffects.first(where: { $0.key == id })?.value } func getChatEffect(id: UUID) -> ChatEffect? { - return chatEffects[id] + return chatEffects.first(where: { $0.key == id })?.value } func getQrCodeEffect(id: UUID) -> QrCodeEffect? { - return qrCodeEffects[id] + return qrCodeEffects.first(where: { $0.key == id })?.value } func getWheelOfLuckEffect(id: UUID) -> WheelOfLuckEffect? { - return wheelOfLuckEffects[id] + return wheelOfLuckEffects.first(where: { $0.key == id })?.value } func getBingoCardEffect(id: UUID) -> BingoCardEffect? { - return bingoCardEffects[id] + return bingoCardEffects.first(where: { $0.key == id })?.value } func getScoreboardEffect(id: UUID) -> ScoreboardEffect? { - return scoreboardEffects[id] + return scoreboardEffects.first(where: { $0.key == id })?.value } func getWidgetShapeEffect(_ widget: SettingsWidget, _ effect: SettingsVideoEffect) -> ShapeEffect? 
{ @@ -249,17 +249,28 @@ extension Model { streamOverlay.isFrontCameraSelected = true case .rtmp: attachBufferedCamera(cameraId: scene.videoSource.rtmpCameraId, scene: scene) + case .whip: + attachBufferedCamera(cameraId: scene.videoSource.whipCameraId, scene: scene) case .srtla: attachBufferedCamera(cameraId: scene.videoSource.srtlaCameraId, scene: scene) case .rist: attachBufferedCamera(cameraId: scene.videoSource.ristCameraId, scene: scene) case .rtsp: attachBufferedCamera(cameraId: scene.videoSource.rtspCameraId, scene: scene) + case .whep: + attachBufferedCamera(cameraId: scene.videoSource.whepCameraId, scene: scene) case .mediaPlayer: mediaPlayers[scene.videoSource.mediaPlayerCameraId]?.activate() attachBufferedCamera(cameraId: scene.videoSource.mediaPlayerCameraId, scene: scene) case .external: - attachExternalCamera(scene: scene) + // Backward-compat: WHIP/WHEP used to be stored as "external" camera IDs (uuidString). + if let id = UUID(uuidString: scene.videoSource.externalCameraId), getWhipStream(id: id) != nil { + attachBufferedCamera(cameraId: id, scene: scene) + } else if let id = UUID(uuidString: scene.videoSource.externalCameraId), getWhepStream(id: id) != nil { + attachBufferedCamera(cameraId: id, scene: scene) + } else { + attachExternalCamera(scene: scene) + } case .screenCapture: attachBufferedCamera(cameraId: screenCaptureCameraId, scene: scene) case .backTripleLowEnergy: @@ -429,14 +440,24 @@ extension Model { switch scene.videoSource.cameraPosition { case .rtmp: return activeBufferedVideoIds.contains(scene.videoSource.rtmpCameraId) + case .whip: + return activeBufferedVideoIds.contains(scene.videoSource.whipCameraId) case .srtla: return activeBufferedVideoIds.contains(scene.videoSource.srtlaCameraId) case .rist: return activeBufferedVideoIds.contains(scene.videoSource.ristCameraId) case .rtsp: return activeBufferedVideoIds.contains(scene.videoSource.rtspCameraId) + case .whep: + return 
activeBufferedVideoIds.contains(scene.videoSource.whepCameraId) case .external: - return isExternalCameraConnected(id: scene.videoSource.externalCameraId) + if let id = UUID(uuidString: scene.videoSource.externalCameraId), getWhipStream(id: id) != nil { + return activeBufferedVideoIds.contains(id) + } else if let id = UUID(uuidString: scene.videoSource.externalCameraId), getWhepStream(id: id) != nil { + return activeBufferedVideoIds.contains(id) + } else { + return isExternalCameraConnected(id: scene.videoSource.externalCameraId) + } default: return true } @@ -604,15 +625,15 @@ extension Model { } private func getImageEffect(id: UUID) -> ImageEffect? { - return imageEffects[id] + return imageEffects.first(where: { $0.key == id })?.value } private func getBrowserEffect(id: UUID) -> BrowserEffect? { - return browserEffects[id] + return browserEffects.first(where: { $0.key == id })?.value } private func getMapEffect(id: UUID) -> MapEffect? { - return mapEffects[id] + return mapEffects.first(where: { $0.key == id })?.value } private func resetVideoEffects(widgets: [SettingsWidget]) { @@ -1129,13 +1150,13 @@ extension Model { _ widget: SettingsWidget, _ effects: inout [VideoEffect] ) { - guard let effect = getScoreboardEffect(id: widget.id), !effects.contains(effect) else { + guard let effect = scoreboardEffects[widget.id], !effects.contains(effect) else { return } effect.setSceneWidget(sceneWidget: sceneWidget.clone()) DispatchQueue.main.async { effect.update(scoreboard: widget.scoreboard, - config: self.getModularScoreboardConfig(scoreboard: widget.scoreboard), + config: self.getCurrentConfig(), players: self.database.scoreboardPlayers) } if isWatchLocal() { diff --git a/Moblin/Various/Model/ModelScoreboard.swift b/Moblin/Various/Model/ModelScoreboard.swift index 4c3a35ffb..4e3b488a7 100644 --- a/Moblin/Various/Model/ModelScoreboard.swift +++ b/Moblin/Various/Model/ModelScoreboard.swift @@ -276,115 +276,8 @@ private let configs: [String: 
RemoteControlScoreboardMatchConfig] = [ ] extension Model { - @MainActor - func handleUpdatePadelScoreboard(action: WatchProtocolPadelScoreboardAction) { - guard let scoreboard = findWidget(id: action.id)?.scoreboard else { - return - } - switch action.action { - case .reset: - handleUpdatePadelScoreboardReset(scoreboard: scoreboard.padel) - case .undo: - handleUpdatePadelScoreboardUndo(scoreboard: scoreboard.padel) - case .incrementHome: - handleUpdatePadelScoreboardIncrementHome(scoreboard: scoreboard.padel) - case .incrementAway: - handleUpdatePadelScoreboardIncrementAway(scoreboard: scoreboard.padel) - case let .players(players): - handleUpdatePadelScoreboardChangePlayers(scoreboard: scoreboard.padel, - players: players) - } - getScoreboardEffect(id: action.id)?.update(scoreboard: scoreboard, - config: getModularScoreboardConfig(scoreboard: scoreboard), - players: database.scoreboardPlayers) - sendUpdatePadelScoreboardToWatch(id: action.id, padel: scoreboard.padel) - } - - @MainActor - func handleUpdateGenericScoreboard(action: WatchProtocolGenericScoreboardAction) { - guard let scoreboard = findWidget(id: action.id)?.scoreboard else { - return - } - switch action.action { - case .reset: - handleUpdateGenericScoreboardReset(scoreboard: scoreboard.generic) - case .undo: - handleUpdateGenericScoreboardUndo(scoreboard: scoreboard.generic) - case .incrementHome: - handleUpdateGenericScoreboardIncrementHome(scoreboard: scoreboard.generic) - case .incrementAway: - handleUpdateGenericScoreboardIncrementAway(scoreboard: scoreboard.generic) - case let .setTitle(title): - handleUpdateGenericScoreboardSetTitle( - scoreboard: scoreboard.generic, - title: title - ) - case let .setClock(minutes, seconds): - handleUpdateGenericScoreboardSetClock(scoreboard: scoreboard.generic, - minutes: minutes, - seconds: seconds) - case let .setClockState(stopped: stopped): - handleUpdateGenericScoreboardSetClockState(scoreboard: scoreboard.generic, - stopped: stopped) - } - 
getScoreboardEffect(id: action.id)?.update(scoreboard: scoreboard, - config: getModularScoreboardConfig(scoreboard: scoreboard), - players: database.scoreboardPlayers) - sendUpdateGenericScoreboardToWatch(id: action.id, generic: scoreboard.generic) - } - - func getEnabledScoreboardWidgetsInSelectedScene() -> [SettingsWidget] { - if let scene = getSelectedScene() { - return getSceneWidgets(scene: scene, onlyEnabled: true) - .filter { $0.widget.type == .scoreboard } - .map { $0.widget } - } else { - return [] - } - } - - func updateScoreboardEffects() { - for widget in getEnabledScoreboardWidgetsInSelectedScene() { - guard let effect = getScoreboardEffect(id: widget.id) else { - continue - } - let scoreboard = widget.scoreboard - switch scoreboard.sport { - case .padel: - break - case .generic: - guard !scoreboard.generic.clock.isStopped else { - continue - } - scoreboard.generic.clock.tick() - DispatchQueue.main.async { - effect.update( - scoreboard: scoreboard, - config: self.getModularScoreboardConfig(scoreboard: scoreboard), - players: self.database.scoreboardPlayers - ) - } - sendUpdateGenericScoreboardToWatch(id: widget.id, generic: scoreboard.generic) - default: - guard !scoreboard.modular.clock.isStopped else { - continue - } - widget.scoreboard.modular.clock.tick() - DispatchQueue.main.async { - effect.update( - scoreboard: scoreboard, - config: self.getModularScoreboardConfig(scoreboard: scoreboard), - players: self.database.scoreboardPlayers - ) - } - remoteControlScoreboardUpdate(scoreboard: scoreboard) - } - } - } - - func getModularScoreboardConfig(scoreboard: SettingsWidgetScoreboard?) - -> RemoteControlScoreboardMatchConfig - { + func getCurrentConfig() -> RemoteControlScoreboardMatchConfig { + let scoreboard = database.widgets.first(where: { $0.type == .scoreboard })?.scoreboard let sportId: String switch scoreboard?.sport { case .basketball: @@ -490,26 +383,26 @@ extension Model { return finalSports.isEmpty ? 
["volleyball", "basketball"] : finalSports } - private func updateScoreboardEffect(widget: SettingsWidget) { + func updateScoreboardEffect(widget: SettingsWidget) { DispatchQueue.main.async { self.getScoreboardEffect(id: widget.id)? .update(scoreboard: widget.scoreboard, - config: self.getModularScoreboardConfig(scoreboard: widget.scoreboard), + config: self.getCurrentConfig(), players: self.database.scoreboardPlayers) } } func handleScoreboardToggleClock() { - guard let widget = getEnabledScoreboardWidgetsInSelectedScene().first else { + guard let widget = database.widgets.first(where: { $0.type == .scoreboard }) else { return } widget.scoreboard.modular.clock.isStopped.toggle() updateScoreboardEffect(widget: widget) - remoteControlScoreboardUpdate(scoreboard: widget.scoreboard) + remoteControlScoreboardUpdate() } func handleScoreboardSetDuration(minutes: Int) { - guard let widget = getEnabledScoreboardWidgetsInSelectedScene().first else { + guard let widget = database.widgets.first(where: { $0.type == .scoreboard }) else { return } let clock = widget.scoreboard.modular.clock @@ -517,11 +410,11 @@ extension Model { clock.reset() clock.isStopped = true updateScoreboardEffect(widget: widget) - remoteControlScoreboardUpdate(scoreboard: widget.scoreboard) + remoteControlScoreboardUpdate() } func handleScoreboardSetClockManual(time: String) { - guard let widget = getEnabledScoreboardWidgetsInSelectedScene().first else { + guard let widget = database.widgets.first(where: { $0.type == .scoreboard }) else { return } let (minutes, seconds) = clockAsMinutesAndSeconds(clock: time) @@ -530,238 +423,69 @@ extension Model { clock.seconds = seconds clock.isStopped = true updateScoreboardEffect(widget: widget) - remoteControlScoreboardUpdate(scoreboard: widget.scoreboard) + remoteControlScoreboardUpdate() } func handleExternalScoreboardUpdate(config: RemoteControlScoreboardMatchConfig) { - guard let widget = getEnabledScoreboardWidgetsInSelectedScene().first else { - return - } - 
let scoreboard = widget.scoreboard - let modular = scoreboard.modular - modular.config = config - scoreboard.setModularSport(sportId: config.sportId) - modular.setLayout(name: config.layout) - if let showTitle = config.global.showTitle { - modular.showTitle = showTitle - } - if let showStats = config.global.showStats { - modular.showGlobalStatsBlock = showStats - } - if let show2nd = config.global.showMoreStats { - modular.showMoreStats = show2nd - } - modular.home.name = config.team1.name - modular.away.name = config.team2.name - modular.title = config.global.title - modular.period = config.global.period - modular.infoBoxText = config.global.infoBoxText - if let score = Int(config.team1.primaryScore) { - modular.score.home = score - } - if let score = Int(config.team2.primaryScore) { - modular.score.away = score - } - modular.home.setHexColors(config.team1.textColor, config.team1.bgColor) - modular.away.setHexColors(config.team2.textColor, config.team2.bgColor) - let (minutes, seconds) = config.global.minutesAndSeconds() - modular.clock.minutes = minutes - modular.clock.seconds = seconds - modular.clock.direction = (config.global.timerDirection == "down") ? .down : .up - updateScoreboardEffect(widget: widget) - remoteControlScoreboardUpdate(scoreboard: scoreboard) - } - - func handleSportSwitch(sportId: String) { - guard let widget = getEnabledScoreboardWidgetsInSelectedScene().first else { - return - } - let scoreboard = widget.scoreboard - scoreboard.setModularSport(sportId: sportId) - if let config = configs[sportId] { + for widget in database.widgets where widget.type == .scoreboard { + let scoreboard = widget.scoreboard let modular = scoreboard.modular modular.config = config + scoreboard.setModularSport(sportId: config.sportId) modular.setLayout(name: config.layout) - modular.score.home = Int(config.team1.primaryScore) ?? 0 - modular.score.away = Int(config.team2.primaryScore) ?? 
0 + if let showTitle = config.global.showTitle { + modular.showTitle = showTitle + } + if let showStats = config.global.showStats { + modular.showGlobalStatsBlock = showStats + } + if let show2nd = config.global.showMoreStats { + modular.showMoreStats = show2nd + } + modular.home.name = config.team1.name + modular.away.name = config.team2.name + modular.title = config.global.title modular.period = config.global.period + modular.infoBoxText = config.global.infoBoxText + if let score = Int(config.team1.primaryScore) { + modular.score.home = score + } + if let score = Int(config.team2.primaryScore) { + modular.score.away = score + } + modular.home.setHexColors(config.team1.textColor, config.team1.bgColor) + modular.away.setHexColors(config.team2.textColor, config.team2.bgColor) let (minutes, seconds) = config.global.minutesAndSeconds() modular.clock.minutes = minutes modular.clock.seconds = seconds - modular.clock.maximum = minutes + (seconds > 0 ? 1 : 0) modular.clock.direction = (config.global.timerDirection == "down") ? 
.down : .up - modular.clock.isStopped = true - modular.home.setHexColors(config.team1.textColor, config.team1.bgColor) - modular.away.setHexColors(config.team2.textColor, config.team2.bgColor) updateScoreboardEffect(widget: widget) - remoteControlScoreboardUpdate(scoreboard: scoreboard) } + remoteControlScoreboardUpdate() } - private func handleUpdatePadelScoreboardReset(scoreboard: SettingsWidgetPadelScoreboard) { - scoreboard.score = [.init()] - scoreboard.scoreChanges.removeAll() - } - - private func handleUpdatePadelScoreboardUndo(scoreboard: SettingsWidgetPadelScoreboard) { - guard let team = scoreboard.scoreChanges.popLast() else { - return - } - guard let score = scoreboard.score.last else { - return - } - if score.home == 0, score.away == 0, scoreboard.score.count > 1 { - scoreboard.score.removeLast() - } - let index = scoreboard.score.count - 1 - switch team { - case .home: - if scoreboard.score[index].home > 0 { - scoreboard.score[index].home -= 1 - } - case .away: - if scoreboard.score[index].away > 0 { - scoreboard.score[index].away -= 1 - } - } - } - - private func handleUpdatePadelScoreboardIncrementHome(scoreboard: SettingsWidgetPadelScoreboard) { - if !isCurrentSetCompleted(scoreboard: scoreboard) { - guard !isMatchCompleted(scoreboard: scoreboard) else { - return - } - scoreboard.score[scoreboard.score.count - 1].home += 1 - scoreboard.scoreChanges.append(.home) - } else { - padelScoreboardUpdateSetCompleted(scoreboard: scoreboard) - } - } - - private func handleUpdatePadelScoreboardIncrementAway(scoreboard: SettingsWidgetPadelScoreboard) { - if !isCurrentSetCompleted(scoreboard: scoreboard) { - guard !isMatchCompleted(scoreboard: scoreboard) else { - return - } - scoreboard.score[scoreboard.score.count - 1].away += 1 - scoreboard.scoreChanges.append(.away) - } else { - padelScoreboardUpdateSetCompleted(scoreboard: scoreboard) - } - } - - private func handleUpdatePadelScoreboardChangePlayers(scoreboard: SettingsWidgetPadelScoreboard, - players: 
WatchProtocolPadelScoreboardActionPlayers) - { - if players.home.count > 0 { - scoreboard.homePlayer1 = players.home[0] - if players.home.count > 1 { - scoreboard.homePlayer2 = players.home[1] - } - } - if players.away.count > 0 { - scoreboard.awayPlayer1 = players.away[0] - if players.away.count > 1 { - scoreboard.awayPlayer2 = players.away[1] - } - } - } - - private func handleUpdateGenericScoreboardReset(scoreboard: SettingsWidgetGenericScoreboard) { - scoreboard.score.home = 0 - scoreboard.score.away = 0 - scoreboard.scoreChanges.removeAll() - } - - private func handleUpdateGenericScoreboardUndo(scoreboard: SettingsWidgetGenericScoreboard) { - guard let team = scoreboard.scoreChanges.popLast() else { - return - } - switch team { - case .home: - if scoreboard.score.home > 0 { - scoreboard.score.home -= 1 - } - case .away: - if scoreboard.score.away > 0 { - scoreboard.score.away -= 1 + func handleSportSwitch(sportId: String) { + for widget in database.widgets where widget.type == .scoreboard { + let scoreboard = widget.scoreboard + scoreboard.setModularSport(sportId: sportId) + if let config = configs[sportId] { + let modular = scoreboard.modular + modular.config = config + modular.setLayout(name: config.layout) + modular.score.home = Int(config.team1.primaryScore) ?? 0 + modular.score.away = Int(config.team2.primaryScore) ?? 0 + modular.period = config.global.period + let (minutes, seconds) = config.global.minutesAndSeconds() + modular.clock.minutes = minutes + modular.clock.seconds = seconds + modular.clock.maximum = minutes + (seconds > 0 ? 1 : 0) + modular.clock.direction = (config.global.timerDirection == "down") ? 
.down : .up + modular.clock.isStopped = true + modular.home.setHexColors(config.team1.textColor, config.team1.bgColor) + modular.away.setHexColors(config.team2.textColor, config.team2.bgColor) + updateScoreboardEffect(widget: widget) + remoteControlScoreboardUpdate() } } } - - private func handleUpdateGenericScoreboardIncrementHome(scoreboard: SettingsWidgetGenericScoreboard) { - scoreboard.score.home += 1 - scoreboard.scoreChanges.append(.home) - } - - private func handleUpdateGenericScoreboardIncrementAway(scoreboard: SettingsWidgetGenericScoreboard) { - scoreboard.score.away += 1 - scoreboard.scoreChanges.append(.away) - } - - private func handleUpdateGenericScoreboardSetTitle(scoreboard: SettingsWidgetGenericScoreboard, - title: String) - { - scoreboard.title = title - } - - private func handleUpdateGenericScoreboardSetClock(scoreboard: SettingsWidgetGenericScoreboard, - minutes: Int, - seconds: Int) - { - scoreboard.clock.minutes = minutes.clamped(to: 0 ... scoreboard.clock.maximum) - if scoreboard.clock.minutes == scoreboard.clock.maximum { - scoreboard.clock.seconds = 0 - } else { - scoreboard.clock.seconds = seconds.clamped(to: 0 ... 
59) - } - } - - private func handleUpdateGenericScoreboardSetClockState(scoreboard: SettingsWidgetGenericScoreboard, - stopped: Bool) - { - scoreboard.clock.isStopped = stopped - } - - private func padelScoreboardUpdateSetCompleted(scoreboard: SettingsWidgetPadelScoreboard) { - guard let score = scoreboard.score.last else { - return - } - guard isSetCompleted(score: score) else { - return - } - guard !isMatchCompleted(scoreboard: scoreboard) else { - return - } - scoreboard.score.append(.init()) - } - - private func isCurrentSetCompleted(scoreboard: SettingsWidgetPadelScoreboard) -> Bool { - guard let score = scoreboard.score.last else { - return false - } - return isSetCompleted(score: score) - } - - private func isSetCompleted(score: SettingsWidgetScoreboardScore) -> Bool { - let maxScore = max(score.home, score.away) - let minScore = min(score.home, score.away) - if maxScore == 6 && minScore <= 4 { - return true - } - if maxScore == 7 { - return true - } - return false - } - - private func isMatchCompleted(scoreboard: SettingsWidgetPadelScoreboard) -> Bool { - if scoreboard.score.count < 5 { - return false - } - guard let score = scoreboard.score.last else { - return false - } - return isSetCompleted(score: score) - } } diff --git a/Moblin/Various/Model/ModelSettingsUrl.swift b/Moblin/Various/Model/ModelSettingsUrl.swift index deb8f4d3e..50c2cea40 100644 --- a/Moblin/Various/Model/ModelSettingsUrl.swift +++ b/Moblin/Various/Model/ModelSettingsUrl.swift @@ -80,17 +80,17 @@ extension Model { database.quickButtonsGeneral.enableScroll = enableScroll } if quickButtons.disableAllButtons == true { - for databaseQuickButton in database.quickButtons { - databaseQuickButton.enabled = false + for quickButton in database.quickButtons { + quickButton.enabled = false } } - for quickButton in quickButtons.buttons ?? 
[] { - if let databaseQuickButton = database.quickButtons.first(where: { quickButton.type == $0.type }) { - if let enabled = quickButton.enabled { - databaseQuickButton.enabled = enabled + for button in quickButtons.buttons ?? [] { + for quickButton in database.quickButtons { + guard button.type == quickButton.type else { + continue } - if let page = quickButton.page { - databaseQuickButton.page = page + if let enabled = button.enabled { + quickButton.enabled = enabled } } } diff --git a/Moblin/Various/Model/ModelStream.swift b/Moblin/Various/Model/ModelStream.swift index 9455250f6..2c98f1609 100644 --- a/Moblin/Various/Model/ModelStream.swift +++ b/Moblin/Various/Model/ModelStream.swift @@ -50,6 +50,7 @@ class CreateStreamWizard: ObservableObject { @Published var customRtmpUrl = "" @Published var customRtmpStreamKey = "" @Published var customRistUrl = "" + @Published var customWhipUrl = "" } enum StreamState { @@ -82,6 +83,24 @@ extension Model { ) return } + if stream.getProtocol() == .whip { + if stream.codec != .h264avc || stream.audioCodec != .opus { + makeErrorToast( + title: String(localized: "WHIP requires H.264 video and Opus audio."), + subTitle: String( + localized: "Update Settings → Streams → \(stream.name) → Video/Audio." 
+ ) + ) + return + } + if stream.resolvedWhipEndpointUrl() == nil { + makeErrorToast( + title: String(localized: "Malformed WHIP URL"), + subTitle: String(localized: "Please use a valid whip:// URL.") + ) + return + } + } if database.location.resetWhenGoingLive { resetLocationData() } @@ -189,6 +208,8 @@ extension Model { startNetStreamSrt() case .rist: startNetStreamRist() + case .whip: + startNetStreamWhip() } updateSpeed(now: .now) streamBecameBrokenTime = nil @@ -234,12 +255,25 @@ extension Model { updateAdaptiveBitrateRistIfEnabled() } + private func startNetStreamWhip() { + guard let endpointUrl = stream.resolvedWhipEndpointUrl() else { + onDisconnected(reason: "WHIP endpoint URL invalid") + return + } + media.whipStartStream( + endpointUrl: endpointUrl, + settings: stream.whip, + videoDimensions: stream.dimensions() + ) + } + func stopNetStream() { moblink.streamer?.stopTunnels() reconnectTimer.stop() media.rtmpStopStream() media.srtStopStream() media.ristStopStream() + media.whipStopStream() streamStartTime = nil updateStreamUptime(now: .now) updateSpeed(now: .now) @@ -531,6 +565,14 @@ extension Model { } } + private func handleWhipConnected() { + onConnected() + } + + private func handleWhipDisconnected(reason: String) { + onDisconnected(reason: reason) + } + private func handleAudioBuffer(sampleBuffer: CMSampleBuffer) { DispatchQueue.main.async { self.speechToText?.append(sampleBuffer: sampleBuffer) @@ -878,6 +920,14 @@ extension Model: MediaDelegate { handleRistDisconnected() } + func mediaOnWhipConnected() { + handleWhipConnected() + } + + func mediaOnWhipDisconnected(_ reason: String) { + handleWhipDisconnected(reason: reason) + } + func mediaOnAudioMuteChange() { updateAudioLevel() } diff --git a/Moblin/Various/Model/ModelStreamWizard.swift b/Moblin/Various/Model/ModelStreamWizard.swift index babc83f25..e3e673ad0 100644 --- a/Moblin/Various/Model/ModelStreamWizard.swift +++ b/Moblin/Various/Model/ModelStreamWizard.swift @@ -22,6 +22,7 @@ enum 
WizardCustomProtocol { case srt case rtmp case rist + case whip func toDefaultCodec() -> SettingsStreamCodec { switch self { @@ -33,6 +34,8 @@ enum WizardCustomProtocol { return .h264avc case .rist: return .h265hevc + case .whip: + return .h264avc } } } @@ -74,6 +77,8 @@ extension Model { return url.url?.absoluteString case .rist: return createStreamWizard.customRistUrl.trim() + case .whip: + return createStreamWizard.customWhipUrl.trim() } return nil } @@ -168,6 +173,9 @@ extension Model { case .myServers: stream.codec = createStreamWizard.customProtocol.toDefaultCodec() } + if createStreamWizard.customProtocol == .whip { + stream.audioCodec = .opus + } stream.audioBitrate = 128_000 database.streams.append(stream) setCurrentStream(stream: stream) @@ -201,6 +209,7 @@ extension Model { createStreamWizard.directIngest = "" createStreamWizard.directStreamKey = "" createStreamWizard.belaboxUrl = "" + createStreamWizard.customWhipUrl = "" } func handleSettingsUrlsInWizard(settings: MoblinSettingsUrl) { diff --git a/Moblin/Various/Model/ModelWhepClient.swift b/Moblin/Various/Model/ModelWhepClient.swift new file mode 100644 index 000000000..23bf1e05a --- /dev/null +++ b/Moblin/Various/Model/ModelWhepClient.swift @@ -0,0 +1,89 @@ +import CoreMedia +import Foundation + +extension Model { + func whepCameras() -> [(UUID, String)] { + return database.whepClient.streams.map { stream in + (stream.id, stream.camera()) + } + } + + func getWhepStream(id: UUID) -> SettingsWhepClientStream? { + return database.whepClient.streams.first { stream in + stream.id == id + } + } + + func getWhepStream(idString: String) -> SettingsWhepClientStream? 
{ + return database.whepClient.streams.first { stream in + idString == stream.id.uuidString + } + } + + func reloadWhepClient() { + stopWhepClient() + for stream in database.whepClient.streams where stream.enabled { + guard let url = URL(string: stream.url) else { + continue + } + let client = WhepClient(cameraId: stream.id, url: url, latency: stream.latencySeconds()) + client.delegate = self + client.start() + ingests.whep.append(client) + } + } + + func stopWhepClient() { + for client in ingests.whep { + client.stop() + } + ingests.whep = [] + } + + func whepClientConnectedInternal(cameraId: UUID) { + guard let stream = getWhepStream(id: cameraId) else { + return + } + let camera = stream.camera() + makeToast(title: String(localized: "\(camera) connected")) + media.addBufferedVideo(cameraId: cameraId, name: camera, latency: stream.latencySeconds()) + media.addBufferedAudio(cameraId: cameraId, name: camera, latency: stream.latencySeconds()) + } + + func whepClientDisconnectedInternal(cameraId: UUID, reason: String) { + guard let stream = getWhepStream(id: cameraId) else { + return + } + makeToast(title: String(localized: "\(stream.camera()) disconnected"), subTitle: reason) + media.removeBufferedVideo(cameraId: cameraId) + media.removeBufferedAudio(cameraId: cameraId) + switchMicIfNeededAfterNetworkCameraChange() + } +} + +extension Model: WhepClientDelegate { + func whepClientErrorToast(title: String) { + makeErrorToastMain(title: title) + } + + func whepClientConnected(cameraId: UUID) { + DispatchQueue.main.async { + self.whepClientConnectedInternal(cameraId: cameraId) + } + } + + func whepClientDisconnected(cameraId: UUID, reason: String) { + DispatchQueue.main.async { + self.whepClientDisconnectedInternal(cameraId: cameraId, reason: reason) + } + } + + func whepClientOnVideoBuffer(cameraId: UUID, _ sampleBuffer: CMSampleBuffer) { + media.appendBufferedVideoSampleBuffer(cameraId: cameraId, sampleBuffer: sampleBuffer) + } + + func 
whepClientOnAudioBuffer(cameraId: UUID, _ sampleBuffer: CMSampleBuffer) { + media.appendBufferedAudioSampleBuffer(cameraId: cameraId, sampleBuffer: sampleBuffer) + } +} + diff --git a/Moblin/Various/Model/ModelWhipServer.swift b/Moblin/Various/Model/ModelWhipServer.swift new file mode 100644 index 000000000..904c92b8b --- /dev/null +++ b/Moblin/Various/Model/ModelWhipServer.swift @@ -0,0 +1,122 @@ +import AVFoundation +import CoreMedia +import Foundation + +extension Model { + func whipCameras() -> [(UUID, String)] { + return database.whipServer.streams.map { stream in + (stream.id, stream.camera()) + } + } + + func getWhipStream(id: UUID) -> SettingsWhipServerStream? { + return database.whipServer.streams.first { stream in + stream.id == id + } + } + + func getWhipStream(idString: String) -> SettingsWhipServerStream? { + return database.whipServer.streams.first { stream in + idString == stream.id.uuidString + } + } + + func getWhipStream(streamKey: String) -> SettingsWhipServerStream? { + return database.whipServer.streams.first { stream in + stream.streamKey == streamKey + } + } + + func stopAllWhipStreams() { + for stream in database.whipServer.streams { + stopWhipServerStream(stream: stream, showToast: false) + } + } + + func isWhipStreamConnected(streamKey: String) -> Bool { + return ingests.whip?.isStreamConnected(streamKey: streamKey) ?? false + } + + func handleWhipServerPublishStart(streamKey: String) { + DispatchQueue.main.async { + guard let stream = self.getWhipStream(streamKey: streamKey) else { + return + } + let camera = stream.camera() + self.makeToast(title: String(localized: "\(camera) connected")) + // Cap latency for local WebRTC ingest. Values above 500ms cause audio buffer + // overflow and excessive video delay. Old saved settings may still have 2000ms. 
+ let latency = min(Double(stream.latency) / 1000.0, 0.5) + self.media.addBufferedVideo(cameraId: stream.id, name: camera, latency: latency) + self.media.addBufferedAudio(cameraId: stream.id, name: camera, latency: latency) + } + } + + func handleWhipServerPublishStop(streamKey: String, reason: String? = nil) { + DispatchQueue.main.async { + guard let stream = self.getWhipStream(streamKey: streamKey) else { + return + } + self.stopWhipServerStream(stream: stream, showToast: true, reason: reason) + self.switchMicIfNeededAfterNetworkCameraChange() + } + } + + private func stopWhipServerStream( + stream: SettingsWhipServerStream, + showToast: Bool, + reason: String? = nil + ) { + if showToast { + makeToast(title: String(localized: "\(stream.camera()) disconnected"), subTitle: reason) + } + media.removeBufferedVideo(cameraId: stream.id) + media.removeBufferedAudio(cameraId: stream.id) + } + + func handleWhipServerFrame(cameraId: UUID, sampleBuffer: CMSampleBuffer) { + media.appendBufferedVideoSampleBuffer(cameraId: cameraId, sampleBuffer: sampleBuffer) + } + + func handleWhipServerAudioBuffer(cameraId: UUID, sampleBuffer: CMSampleBuffer) { + media.appendBufferedAudioSampleBuffer(cameraId: cameraId, sampleBuffer: sampleBuffer) + } + + func stopWhipServer() { + ingests.whip?.stop() + ingests.whip = nil + stopAllWhipStreams() + } + + func reloadWhipServer() { + stopWhipServer() + if database.whipServer.enabled { + ingests.whip = WhipServer(settings: database.whipServer.clone()) + ingests.whip?.delegate = self + ingests.whip?.start() + } + } + + func whipServerEnabled() -> Bool { + return database.whipServer.enabled + } +} + +extension Model: WhipServerDelegate { + func whipServerOnPublishStart(streamKey: String) { + handleWhipServerPublishStart(streamKey: streamKey) + } + + func whipServerOnPublishStop(streamKey: String, reason: String) { + handleWhipServerPublishStop(streamKey: streamKey, reason: reason) + } + + func whipServerOnVideoBuffer(cameraId: UUID, _ 
sampleBuffer: CMSampleBuffer) { + handleWhipServerFrame(cameraId: cameraId, sampleBuffer: sampleBuffer) + } + + func whipServerOnAudioBuffer(cameraId: UUID, _ sampleBuffer: CMSampleBuffer) { + handleWhipServerAudioBuffer(cameraId: cameraId, sampleBuffer: sampleBuffer) + } +} + diff --git a/Moblin/Various/Model/ModelYouTube.swift b/Moblin/Various/Model/ModelYouTube.swift index 13e09a9ac..e83eca204 100644 --- a/Moblin/Various/Model/ModelYouTube.swift +++ b/Moblin/Various/Model/ModelYouTube.swift @@ -119,15 +119,12 @@ extension Model { youTubeApi?.listVideos(videoId: self.stream.youTubeVideoId) { switch $0 { case let .success(response): - if let liveStreamingDetails = response.items.first?.liveStreamingDetails { - if liveStreamingDetails.isLive() { - let viewers = Int(liveStreamingDetails.concurrentViewers ?? "0") ?? 0 - self.youTubePlatformStatus = .live(viewerCount: viewers) - } else { - self.youTubePlatformStatus = .offline - } + if let item = response.items.first, + let viewers = Int(item.liveStreamingDetails.concurrentViewers) + { + self.youTubePlatformStatus = .live(viewerCount: viewers) } else { - self.youTubePlatformStatus = .unknown + self.youTubePlatformStatus = .offline } default: self.youTubePlatformStatus = .unknown diff --git a/Moblin/Various/Network/HttpServer.swift b/Moblin/Various/Network/HttpServer.swift index e8128470d..09eea893b 100644 --- a/Moblin/Various/Network/HttpServer.swift +++ b/Moblin/Various/Network/HttpServer.swift @@ -6,8 +6,7 @@ private struct HttpRequestParseResult { let path: String let version: String let headers: [(String, String)] - // periphery:ignore - let data: Data + let body: Data } private class HttpRequestParser: HttpParser { @@ -28,17 +27,34 @@ private class HttpRequestParser: HttpParser { return (true, nil) } var headers: [(String, String)] = [] + var contentLength: Int = 0 while let (line, nextLineOffset) = getLine(data: data, offset: offset) { - let parts = line.lowercased().split(separator: " ") - if parts.count == 2 { 
- headers.append((String(parts[0]), String(parts[1]))) + if let colonIndex = line.firstIndex(of: ":") { + let key = line[..= contentLength { + return (true, HttpRequestParseResult(method: String(method), + path: String(path), + version: String(version), + headers: headers, + body: body.prefix(contentLength))) + } + return (false, nil) } offset = nextLineOffset } @@ -50,14 +66,15 @@ class HttpServerRequest { let method: String let path: String let version: String - // periphery:ignore let headers: [(String, String)] + let body: Data - fileprivate init(method: String, path: String, version: String, headers: [(String, String)]) { + fileprivate init(method: String, path: String, version: String, headers: [(String, String)], body: Data) { self.method = method self.path = path self.version = version self.headers = headers + self.body = body } fileprivate func getContentType() -> String { @@ -78,18 +95,41 @@ class HttpServerRequest { return "text/html" } } + + func header(_ name: String) -> String? 
{ + let key = name.lowercased() + return headers.first(where: { $0.0 == key })?.1 + } } enum HttpServerStatus { case ok + case created + case noContent + case badRequest case notFound + case methodNotAllowed + case unsupportedMediaType + case internalServerError func code() -> Int { switch self { case .ok: return 200 + case .created: + return 201 + case .noContent: + return 204 + case .badRequest: + return 400 case .notFound: return 404 + case .methodNotAllowed: + return 405 + case .unsupportedMediaType: + return 415 + case .internalServerError: + return 500 } } @@ -97,8 +137,20 @@ enum HttpServerStatus { switch self { case .ok: return "OK" + case .created: + return "Created" + case .noContent: + return "No Content" + case .badRequest: + return "Bad Request" case .notFound: return "Not Found" + case .methodNotAllowed: + return "Method Not Allowed" + case .unsupportedMediaType: + return "Unsupported Media Type" + case .internalServerError: + return "Internal Server Error" } } } @@ -110,12 +162,22 @@ class HttpServerResponse { self.connection = connection } - func send(data: Data, status: HttpServerStatus = .ok) { - connection?.sendAndClose(status: status, content: data) + func send( + data: Data, + status: HttpServerStatus = .ok, + contentType: String? = nil, + headers: [(String, String)] = [] + ) { + connection?.sendAndClose(status: status, contentType: contentType, headers: headers, content: data) } - func send(text: String, status: HttpServerStatus = .ok) { - send(data: text.utf8Data, status: status) + func send( + text: String, + status: HttpServerStatus = .ok, + contentType: String? 
= nil, + headers: [(String, String)] = [] + ) { + send(data: text.utf8Data, status: status, contentType: contentType, headers: headers) } } @@ -156,22 +218,26 @@ private class HttpServerConnection { request = HttpServerRequest(method: result.method, path: result.path, version: result.version, - headers: result.headers) + headers: result.headers, + body: result.body) guard let route = server.findRoute(request: request!) else { - sendAndClose(status: .notFound, content: Data()) + sendAndClose(status: .notFound, contentType: nil, headers: [], content: Data()) return } route.handler(request!, HttpServerResponse(connection: self)) } - func sendAndClose(status: HttpServerStatus, content: Data) { + func sendAndClose(status: HttpServerStatus, contentType: String?, headers: [(String, String)], content: Data) { guard let request else { return } var lines: [String] = [] lines.append("\(request.version) \(status.code()) \(status.text())") if !content.isEmpty { - lines.append("Content-Type: \(request.getContentType())") + lines.append("Content-Type: \(contentType ?? request.getContentType())") + } + for header in headers { + lines.append("\(header.0): \(header.1)") } lines.append("Connection: close") lines.append("") @@ -198,7 +264,7 @@ class HttpServerRoute { class HttpServer { private let queue: DispatchQueue - private let routes: [HttpServerRoute] + private var routes: [HttpServerRoute] private var listener: NWListener? 
private let retryTimer: SimpleTimer private var port: NWEndpoint.Port = .http @@ -210,6 +276,12 @@ class HttpServer { retryTimer = SimpleTimer(queue: queue) } + func setRoutes(_ routes: [HttpServerRoute]) { + queue.async { + self.routes = routes + } + } + func start(port: NWEndpoint.Port) { logger.debug("http-server: Start") queue.async { diff --git a/Moblin/Various/Settings/Settings.swift b/Moblin/Various/Settings/Settings.swift index 6246b8ee1..2df34d2d6 100644 --- a/Moblin/Various/Settings/Settings.swift +++ b/Moblin/Various/Settings/Settings.swift @@ -12,9 +12,11 @@ enum SettingsCameraId { case back(id: String) case front(id: String) case rtmp(id: UUID) + case whip(id: UUID) case srtla(id: UUID) case rist(id: UUID) case rtsp(id: UUID) + case whep(id: UUID) case mediaPlayer(id: UUID) case external(id: String, name: String) case screenCapture @@ -1107,6 +1109,7 @@ class Database: Codable, ObservableObject { var quickButtonsGeneral: SettingsQuickButtons = .init() @Published var quickButtons: [SettingsQuickButton] = [] var rtmpServer: SettingsRtmpServer = .init() + var whipServer: SettingsWhipServer = .init() @Published var networkInterfaceNames: [SettingsNetworkInterfaceName] = [] @Published var lowBitrateWarning: Bool = true @Published var vibrate: Bool = false @@ -1158,6 +1161,7 @@ class Database: Codable, ObservableObject { var ristServer: SettingsRistServer = .init() var disconnectProtection: SettingsDisconnectProtection = .init() var rtspClient: SettingsRtspClient = .init() + var whepClient: SettingsWhepClient = .init() var navigation: SettingsNavigation = .init() var wiFiAware: SettingsWiFiAware = .init() var face: SettingsFace = .init() @@ -1211,6 +1215,7 @@ class Database: Codable, ObservableObject { quickButtons, globalButtons, rtmpServer, + whipServer, networkInterfaceNames, lowBitrateWarning, vibrate, @@ -1261,6 +1266,7 @@ class Database: Codable, ObservableObject { ristServer, disconnectProtection, rtspClient, + whepClient, navigation, wiFiAware, face, 
@@ -1285,6 +1291,7 @@ class Database: Codable, ObservableObject { try container.encode(.quickButtons, quickButtonsGeneral) try container.encode(.globalButtons, quickButtons) try container.encode(.rtmpServer, rtmpServer) + try container.encode(.whipServer, whipServer) try container.encode(.networkInterfaceNames, networkInterfaceNames) try container.encode(.lowBitrateWarning, lowBitrateWarning) try container.encode(.vibrate, vibrate) @@ -1335,6 +1342,7 @@ class Database: Codable, ObservableObject { try container.encode(.ristServer, ristServer) try container.encode(.disconnectProtection, disconnectProtection) try container.encode(.rtspClient, rtspClient) + try container.encode(.whepClient, whepClient) try container.encode(.navigation, navigation) try container.encode(.wiFiAware, wiFiAware) try container.encode(.face, face) @@ -1365,6 +1373,7 @@ class Database: Codable, ObservableObject { quickButtonsGeneral = container.decode(.quickButtons, SettingsQuickButtons.self, .init()) quickButtons = container.decode(.globalButtons, [SettingsQuickButton].self, []) rtmpServer = container.decode(.rtmpServer, SettingsRtmpServer.self, .init()) + whipServer = container.decode(.whipServer, SettingsWhipServer.self, .init()) networkInterfaceNames = container.decode( .networkInterfaceNames, [SettingsNetworkInterfaceName].self, @@ -1444,6 +1453,7 @@ class Database: Codable, ObservableObject { .init() ) rtspClient = container.decode(.rtspClient, SettingsRtspClient.self, .init()) + whepClient = container.decode(.whepClient, SettingsWhepClient.self, .init()) navigation = container.decode(.navigation, SettingsNavigation.self, .init()) wiFiAware = container.decode(.wiFiAware, SettingsWiFiAware.self, .init()) face = (try? container.decode(SettingsFace.self, forKey: .face)) ?? 
debug.faceToBeRemoved @@ -1915,7 +1925,6 @@ private func addMissingDeepLinkQuickButtons(database: Database) { let buttonExists = quickButtons.buttons.contains(where: { quickButton.type == $0.type }) if !buttonExists { button.type = quickButton.type - button.page = quickButton.page quickButtons.buttons.append(button) } } diff --git a/Moblin/Various/Settings/SettingsDeepLinkCreator.swift b/Moblin/Various/Settings/SettingsDeepLinkCreator.swift index d3dc856b3..7e6ad6623 100644 --- a/Moblin/Various/Settings/SettingsDeepLinkCreator.swift +++ b/Moblin/Various/Settings/SettingsDeepLinkCreator.swift @@ -220,16 +220,14 @@ class DeepLinkCreatorStream: Codable, Identifiable, ObservableObject, Named { } class DeepLinkCreatorQuickButton: Codable, Identifiable, ObservableObject { - var id: UUID = .init() + @Published var id: UUID = .init() @Published var type: SettingsQuickButtonType = .unknown @Published var enabled: Bool = false - @Published var page: Int = 1 enum CodingKeys: CodingKey { case id, type, - enabled, - page + enabled } func encode(to encoder: Encoder) throws { @@ -237,7 +235,6 @@ class DeepLinkCreatorQuickButton: Codable, Identifiable, ObservableObject { try container.encode(.id, id) try container.encode(.type, type) try container.encode(.enabled, enabled) - try container.encode(.page, page) } init() {} @@ -247,7 +244,6 @@ class DeepLinkCreatorQuickButton: Codable, Identifiable, ObservableObject { id = container.decode(.id, UUID.self, .init()) type = container.decode(.type, SettingsQuickButtonType.self, .unknown) enabled = container.decode(.enabled, Bool.self, false) - page = container.decode(.page, Int.self, 1) } } diff --git a/Moblin/Various/Settings/SettingsIngests.swift b/Moblin/Various/Settings/SettingsIngests.swift index 2b01cc794..b4fe7f7aa 100644 --- a/Moblin/Various/Settings/SettingsIngests.swift +++ b/Moblin/Various/Settings/SettingsIngests.swift @@ -1,6 +1,7 @@ import Foundation private let defaultRtmpLatency: Int32 = 2000 +private let 
defaultWhipLatency: Int32 = 200 class SettingsRtmpServerStream: Codable, Identifiable, ObservableObject, Named { static let baseName = String(localized: "My stream") @@ -86,6 +87,90 @@ class SettingsRtmpServer: Codable, ObservableObject { } } +class SettingsWhipServerStream: Codable, Identifiable, ObservableObject, Named { + static let baseName = String(localized: "My stream") + var id: UUID = .init() + @Published var name: String = baseName + @Published var streamKey: String = "" + @Published var latency: Int32 = defaultWhipLatency + + enum CodingKeys: CodingKey { + case id, + name, + streamKey, + latency + } + + func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: CodingKeys.self) + try container.encode(.id, id) + try container.encode(.name, name) + try container.encode(.streamKey, streamKey) + try container.encode(.latency, latency) + } + + init() {} + + required init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: CodingKeys.self) + id = container.decode(.id, UUID.self, .init()) + name = container.decode(.name, String.self, Self.baseName) + streamKey = container.decode(.streamKey, String.self, "") + latency = container.decode(.latency, Int32.self, defaultWhipLatency) + } + + func camera() -> String { + return "WHIP \(name)" + } + + func clone() -> SettingsWhipServerStream { + let new = SettingsWhipServerStream() + new.id = id + new.name = name + new.streamKey = streamKey + new.latency = latency + return new + } +} + +class SettingsWhipServer: Codable, ObservableObject { + @Published var enabled: Bool = false + @Published var port: UInt16 = 8080 + @Published var streams: [SettingsWhipServerStream] = [] + + enum CodingKeys: CodingKey { + case enabled, + port, + streams + } + + func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: CodingKeys.self) + try container.encode(.enabled, enabled) + try container.encode(.port, port) + try container.encode(.streams, streams) 
+ } + + init() {} + + required init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: CodingKeys.self) + enabled = container.decode(.enabled, Bool.self, false) + port = container.decode(.port, UInt16.self, 8080) + streams = container.decode(.streams, [SettingsWhipServerStream].self, []) + } + + func clone() -> SettingsWhipServer { + let new = SettingsWhipServer() + new.enabled = enabled + new.port = port + for stream in streams { + new.streams.append(stream.clone()) + } + return new + } +} + class SettingsSrtlaServerStream: Codable, Identifiable, ObservableObject, Named { static let baseName = String(localized: "My stream") var id: UUID = .init() @@ -307,3 +392,68 @@ class SettingsRtspClient: Codable, ObservableObject { streams = container.decode(.streams, [SettingsRtspClientStream].self, []) } } + +class SettingsWhepClientStream: Codable, Identifiable, ObservableObject, Named { + static let baseName = String(localized: "My stream") + var id: UUID = .init() + @Published var name: String = baseName + @Published var url: String = "" + @Published var enabled: Bool = false + @Published var latency: Int32 = 200 + + enum CodingKeys: CodingKey { + case id, + name, + url, + enabled, + latency + } + + func latencySeconds() -> Double { + return Double(latency) / 1000 + } + + func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: CodingKeys.self) + try container.encode(.id, id) + try container.encode(.name, name) + try container.encode(.url, url) + try container.encode(.enabled, enabled) + try container.encode(.latency, latency) + } + + init() {} + + required init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: CodingKeys.self) + id = container.decode(.id, UUID.self, .init()) + name = container.decode(.name, String.self, Self.baseName) + url = container.decode(.url, String.self, "") + enabled = container.decode(.enabled, Bool.self, false) + latency = container.decode(.latency, 
Int32.self, 200) + } + + func camera() -> String { + return whepCamera(name: name) + } +} + +class SettingsWhepClient: Codable, ObservableObject { + @Published var streams: [SettingsWhepClientStream] = [] + + enum CodingKeys: CodingKey { + case streams + } + + func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: CodingKeys.self) + try container.encode(.streams, streams) + } + + init() {} + + required init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: CodingKeys.self) + streams = container.decode(.streams, [SettingsWhepClientStream].self, []) + } +} diff --git a/Moblin/Various/Settings/SettingsScene.swift b/Moblin/Various/Settings/SettingsScene.swift index 4644548e1..dbc349443 100644 --- a/Moblin/Various/Settings/SettingsScene.swift +++ b/Moblin/Various/Settings/SettingsScene.swift @@ -773,28 +773,11 @@ class SettingsWidgetCrop: Codable { } } -enum SettingsWidgetBrowserMode: String, Codable, CaseIterable { - case periodicAudioAndVideo - case audioAndVideoOnly - case audioOnly - - func toString() -> String { - switch self { - case .periodicAudioAndVideo: - return String(localized: "Periodic, audio and video") - case .audioAndVideoOnly: - return String(localized: "Audio and video only") - case .audioOnly: - return String(localized: "Audio only") - } - } -} - class SettingsWidgetBrowser: Codable, ObservableObject { @Published var url: String = "" @Published var width: Int = 500 @Published var height: Int = 500 - @Published var mode: SettingsWidgetBrowserMode = .periodicAudioAndVideo + @Published var audioAndVideoOnly: Bool = false @Published var baseFps: Float = 5.0 @Published var styleSheet: String = "" @Published var moblinAccess: Bool = false @@ -806,7 +789,6 @@ class SettingsWidgetBrowser: Codable, ObservableObject { width, height, audioOnly, - mode, fps, styleSheet, moblinAccess @@ -817,7 +799,7 @@ class SettingsWidgetBrowser: Codable, ObservableObject { try container.encode(.url, url) try
container.encode(.width, width) try container.encode(.height, height) - try container.encode(.mode, mode) + try container.encode(.audioOnly, audioAndVideoOnly) try container.encode(.fps, baseFps) try container.encode(.styleSheet, styleSheet) try container.encode(.moblinAccess, moblinAccess) @@ -828,12 +810,7 @@ class SettingsWidgetBrowser: Codable, ObservableObject { url = container.decode(.url, String.self, "") width = container.decode(.width, Int.self, 500) height = container.decode(.height, Int.self, 500) - if let decodedMode = try? container.decode(SettingsWidgetBrowserMode.self, forKey: .mode) { - mode = decodedMode - } else { - let audioOnly = container.decode(.audioOnly, Bool.self, false) - mode = audioOnly ? .audioAndVideoOnly : .periodicAudioAndVideo - } + audioAndVideoOnly = container.decode(.audioOnly, Bool.self, false) baseFps = container.decode(.fps, Float.self, 5.0) styleSheet = container.decode(.styleSheet, String.self, "") moblinAccess = container.decode(.moblinAccess, Bool.self, false) @@ -1503,9 +1480,11 @@ class SettingsWidgetVTuber: Codable, ObservableObject { backCameraId, frontCameraId, rtmpCameraId, + whipCameraId, srtlaCameraId, ristCameraId, rtspCameraId, + whepCameraId, mediaPlayerCameraId, externalCameraId, externalCameraName, @@ -1524,9 +1503,11 @@ class SettingsWidgetVTuber: Codable, ObservableObject { try container.encode(.backCameraId, videoSource.backCameraId) try container.encode(.frontCameraId, videoSource.frontCameraId) try container.encode(.rtmpCameraId, videoSource.rtmpCameraId) + try container.encode(.whipCameraId, videoSource.whipCameraId) try container.encode(.srtlaCameraId, videoSource.srtlaCameraId) try container.encode(.ristCameraId, videoSource.ristCameraId) try container.encode(.rtspCameraId, videoSource.rtspCameraId) + try container.encode(.whepCameraId, videoSource.whepCameraId) try container.encode(.mediaPlayerCameraId, videoSource.mediaPlayerCameraId) try container.encode(.externalCameraId, 
videoSource.externalCameraId) try container.encode(.externalCameraName, videoSource.externalCameraName) @@ -1543,9 +1524,11 @@ class SettingsWidgetVTuber: Codable, ObservableObject { videoSource.backCameraId = decodeCameraId(container, .backCameraId, bestBackCameraId) videoSource.frontCameraId = decodeCameraId(container, .frontCameraId, bestFrontCameraId) videoSource.rtmpCameraId = container.decode(.rtmpCameraId, UUID.self, .init()) + videoSource.whipCameraId = container.decode(.whipCameraId, UUID.self, .init()) videoSource.srtlaCameraId = container.decode(.srtlaCameraId, UUID.self, .init()) videoSource.ristCameraId = container.decode(.ristCameraId, UUID.self, .init()) videoSource.rtspCameraId = container.decode(.rtspCameraId, UUID.self, .init()) + videoSource.whepCameraId = container.decode(.whepCameraId, UUID.self, .init()) videoSource.mediaPlayerCameraId = container.decode(.mediaPlayerCameraId, UUID.self, .init()) videoSource.externalCameraId = container.decode(.externalCameraId, String.self, "") videoSource.externalCameraName = container.decode(.externalCameraName, String.self, "") @@ -1576,9 +1559,11 @@ class SettingsWidgetPngTuber: Codable, ObservableObject { backCameraId, frontCameraId, rtmpCameraId, + whipCameraId, srtlaCameraId, ristCameraId, rtspCameraId, + whepCameraId, mediaPlayerCameraId, externalCameraId, externalCameraName, @@ -1595,9 +1580,11 @@ class SettingsWidgetPngTuber: Codable, ObservableObject { try container.encode(.backCameraId, videoSource.backCameraId) try container.encode(.frontCameraId, videoSource.frontCameraId) try container.encode(.rtmpCameraId, videoSource.rtmpCameraId) + try container.encode(.whipCameraId, videoSource.whipCameraId) try container.encode(.srtlaCameraId, videoSource.srtlaCameraId) try container.encode(.ristCameraId, videoSource.ristCameraId) try container.encode(.rtspCameraId, videoSource.rtspCameraId) + try container.encode(.whepCameraId, videoSource.whepCameraId) try container.encode(.mediaPlayerCameraId, 
videoSource.mediaPlayerCameraId) try container.encode(.externalCameraId, videoSource.externalCameraId) try container.encode(.externalCameraName, videoSource.externalCameraName) @@ -1612,9 +1599,11 @@ class SettingsWidgetPngTuber: Codable, ObservableObject { videoSource.backCameraId = decodeCameraId(container, .backCameraId, bestBackCameraId) videoSource.frontCameraId = decodeCameraId(container, .frontCameraId, bestFrontCameraId) videoSource.rtmpCameraId = container.decode(.rtmpCameraId, UUID.self, .init()) + videoSource.whipCameraId = container.decode(.whipCameraId, UUID.self, .init()) videoSource.srtlaCameraId = container.decode(.srtlaCameraId, UUID.self, .init()) videoSource.ristCameraId = container.decode(.ristCameraId, UUID.self, .init()) videoSource.rtspCameraId = container.decode(.rtspCameraId, UUID.self, .init()) + videoSource.whepCameraId = container.decode(.whepCameraId, UUID.self, .init()) videoSource.mediaPlayerCameraId = container.decode(.mediaPlayerCameraId, UUID.self, .init()) videoSource.externalCameraId = container.decode(.externalCameraId, String.self, "") videoSource.externalCameraName = container.decode(.externalCameraName, String.self, "") @@ -2280,10 +2269,12 @@ enum SettingsSceneCameraPosition: String, Codable, CaseIterable { case back = "Back" case front = "Front" case rtmp = "RTMP" + case whip = "WHIP" case external = "External" case srtla = "SRT(LA)" case rist = "RIST" case rtsp = "RTSP" + case whep = "WHEP" case mediaPlayer = "Media player" case screenCapture = "Screen capture" case backTripleLowEnergy = "Back triple" @@ -2314,9 +2305,11 @@ struct SettingsVideoSource { var backCameraId: String = bestBackCameraId var frontCameraId: String = bestFrontCameraId var rtmpCameraId: UUID = .init() + var whipCameraId: UUID = .init() var srtlaCameraId: UUID = .init() var ristCameraId: UUID = .init() var rtspCameraId: UUID = .init() + var whepCameraId: UUID = .init() var mediaPlayerCameraId: UUID = .init() var externalCameraId: String = "" var 
externalCameraName: String = "" @@ -2329,6 +2322,8 @@ struct SettingsVideoSource { return .front(id: frontCameraId) case .rtmp: return .rtmp(id: rtmpCameraId) + case .whip: + return .whip(id: whipCameraId) case .external: return .external(id: externalCameraId, name: externalCameraName) case .srtla: @@ -2337,6 +2332,8 @@ struct SettingsVideoSource { return .rist(id: ristCameraId) case .rtsp: return .rtsp(id: rtspCameraId) + case .whep: + return .whep(id: whepCameraId) case .mediaPlayer: return .mediaPlayer(id: mediaPlayerCameraId) case .screenCapture: @@ -2363,6 +2360,9 @@ struct SettingsVideoSource { case let .rtmp(id: id): cameraPosition = .rtmp rtmpCameraId = id + case let .whip(id: id): + cameraPosition = .whip + whipCameraId = id case let .srtla(id: id): cameraPosition = .srtla srtlaCameraId = id @@ -2372,6 +2372,9 @@ struct SettingsVideoSource { case let .rtsp(id: id): cameraPosition = .rtsp rtspCameraId = id + case let .whep(id: id): + cameraPosition = .whep + whepCameraId = id case let .mediaPlayer(id: id): cameraPosition = .mediaPlayer mediaPlayerCameraId = id @@ -2428,12 +2431,16 @@ struct SettingsVideoSource { switch cameraPosition { case .rtmp: return cameraId == rtmpCameraId + case .whip: + return cameraId == whipCameraId case .srtla: return cameraId == srtlaCameraId case .rist: return cameraId == ristCameraId case .rtsp: return cameraId == rtspCameraId + case .whep: + return cameraId == whepCameraId default: return false } @@ -2462,9 +2469,11 @@ class SettingsWidgetVideoSource: Codable, ObservableObject { backCameraId, frontCameraId, rtmpCameraId, + whipCameraId, srtlaCameraId, ristCameraId, rtspCameraId, + whepCameraId, mediaPlayerCameraId, externalCameraId, externalCameraName, @@ -2492,9 +2501,11 @@ class SettingsWidgetVideoSource: Codable, ObservableObject { try container.encode(.backCameraId, videoSource.backCameraId) try container.encode(.frontCameraId, videoSource.frontCameraId) try container.encode(.rtmpCameraId, videoSource.rtmpCameraId) + try 
container.encode(.whipCameraId, videoSource.whipCameraId) try container.encode(.srtlaCameraId, videoSource.srtlaCameraId) try container.encode(.ristCameraId, videoSource.ristCameraId) try container.encode(.rtspCameraId, videoSource.rtspCameraId) + try container.encode(.whepCameraId, videoSource.whepCameraId) try container.encode(.mediaPlayerCameraId, videoSource.mediaPlayerCameraId) try container.encode(.externalCameraId, videoSource.externalCameraId) try container.encode(.externalCameraName, videoSource.externalCameraName) @@ -2518,9 +2529,11 @@ class SettingsWidgetVideoSource: Codable, ObservableObject { videoSource.backCameraId = decodeCameraId(container, .backCameraId, bestBackCameraId) videoSource.frontCameraId = decodeCameraId(container, .frontCameraId, bestFrontCameraId) videoSource.rtmpCameraId = container.decode(.rtmpCameraId, UUID.self, .init()) + videoSource.whipCameraId = container.decode(.whipCameraId, UUID.self, .init()) videoSource.srtlaCameraId = container.decode(.srtlaCameraId, UUID.self, .init()) videoSource.ristCameraId = container.decode(.ristCameraId, UUID.self, .init()) videoSource.rtspCameraId = container.decode(.rtspCameraId, UUID.self, .init()) + videoSource.whepCameraId = container.decode(.whepCameraId, UUID.self, .init()) videoSource.mediaPlayerCameraId = container.decode(.mediaPlayerCameraId, UUID.self, .init()) videoSource.externalCameraId = container.decode(.externalCameraId, String.self, "") videoSource.externalCameraName = container.decode(.externalCameraName, String.self, "") @@ -2809,7 +2822,7 @@ class SettingsWidgetScoreboardClock: Codable, ObservableObject { @Published var direction: SettingsWidgetGenericScoreboardClockDirection = .up var minutes: Int = 0 var seconds: Int = 0 - @Published var isStopped: Bool = true + var isStopped: Bool = true enum CodingKeys: CodingKey { case maximum, @@ -3250,9 +3263,11 @@ class SettingsScene: Codable, Identifiable, Equatable, ObservableObject, Named { backCameraId, frontCameraId, rtmpCameraId, 
+ whipCameraId, srtlaCameraId, ristCameraId, rtspCameraId, + whepCameraId, mediaPlayerCameraId, externalCameraId, externalCameraName, @@ -3275,9 +3290,11 @@ class SettingsScene: Codable, Identifiable, Equatable, ObservableObject, Named { try container.encode(.backCameraId, videoSource.backCameraId) try container.encode(.frontCameraId, videoSource.frontCameraId) try container.encode(.rtmpCameraId, videoSource.rtmpCameraId) + try container.encode(.whipCameraId, videoSource.whipCameraId) try container.encode(.srtlaCameraId, videoSource.srtlaCameraId) try container.encode(.ristCameraId, videoSource.ristCameraId) try container.encode(.rtspCameraId, videoSource.rtspCameraId) + try container.encode(.whepCameraId, videoSource.whepCameraId) try container.encode(.mediaPlayerCameraId, videoSource.mediaPlayerCameraId) try container.encode(.externalCameraId, videoSource.externalCameraId) try container.encode(.externalCameraName, videoSource.externalCameraName) @@ -3304,9 +3321,11 @@ class SettingsScene: Codable, Identifiable, Equatable, ObservableObject, Named { videoSource.backCameraId = decodeCameraId(container, .backCameraId, bestBackCameraId) videoSource.frontCameraId = decodeCameraId(container, .frontCameraId, bestFrontCameraId) videoSource.rtmpCameraId = container.decode(.rtmpCameraId, UUID.self, .init()) + videoSource.whipCameraId = container.decode(.whipCameraId, UUID.self, .init()) videoSource.srtlaCameraId = container.decode(.srtlaCameraId, UUID.self, .init()) videoSource.ristCameraId = container.decode(.ristCameraId, UUID.self, .init()) videoSource.rtspCameraId = container.decode(.rtspCameraId, UUID.self, .init()) + videoSource.whepCameraId = container.decode(.whepCameraId, UUID.self, .init()) videoSource.mediaPlayerCameraId = container.decode(.mediaPlayerCameraId, UUID.self, .init()) videoSource.externalCameraId = container.decode(.externalCameraId, String.self, "") videoSource.externalCameraName = container.decode(.externalCameraName, String.self, "") diff --git 
a/Moblin/Various/Settings/SettingsStream.swift b/Moblin/Various/Settings/SettingsStream.swift index 6c09e3517..758c466cb 100644 --- a/Moblin/Various/Settings/SettingsStream.swift +++ b/Moblin/Various/Settings/SettingsStream.swift @@ -151,6 +151,7 @@ enum SettingsStreamProtocol: String, Codable { case rtmp = "RTMP" case srt = "SRT" case rist = "RIST" + case whip = "WHIP" init(from decoder: Decoder) throws { self = try SettingsStreamProtocol(rawValue: decoder.singleValueContainer().decode(RawValue.self)) ?? @@ -164,6 +165,7 @@ enum SettingsStreamDetailedProtocol { case srt case srtla case rist + case whip } class SettingsStreamSrtConnectionPriority: Codable, Identifiable { @@ -548,6 +550,20 @@ class SettingsStreamRist: Codable { } } +class SettingsStreamWhip: Codable { + var iceServers: [String] = [] + var maxRetryCount: Int = 0 + var insecureHttpAllowed: Bool = false + + func clone() -> SettingsStreamWhip { + let new = SettingsStreamWhip() + new.iceServers = iceServers + new.maxRetryCount = maxRetryCount + new.insecureHttpAllowed = insecureHttpAllowed + return new + } +} + class SettingsStreamChat: Codable { var bttvEmotes: Bool = false var ffzEmotes: Bool = false @@ -1028,6 +1044,7 @@ class SettingsStream: Codable, Identifiable, Equatable, ObservableObject, Named var srt: SettingsStreamSrt = .init() var rtmp: SettingsStreamRtmp = .init() var rist: SettingsStreamRist = .init() + var whip: SettingsStreamWhip = .init() @Published var maxKeyFrameInterval: Int32 = 2 @Published var audioCodec: SettingsStreamAudioCodec = .aac var audioBitrate: Int = 128_000 @@ -1113,6 +1130,7 @@ class SettingsStream: Codable, Identifiable, Equatable, ObservableObject, Named srt, rtmp, rist, + whip, captureSessionPresetEnabled, captureSessionPreset, maxKeyFrameInterval, @@ -1197,6 +1215,7 @@ class SettingsStream: Codable, Identifiable, Equatable, ObservableObject, Named try container.encode(.srt, srt) try container.encode(.rtmp, rtmp) try container.encode(.rist, rist) + try 
container.encode(.whip, whip) try container.encode(.maxKeyFrameInterval, maxKeyFrameInterval) try container.encode(.audioCodec, audioCodec) try container.encode(.audioBitrate, audioBitrate) @@ -1290,6 +1309,7 @@ class SettingsStream: Codable, Identifiable, Equatable, ObservableObject, Named srt = container.decode(.srt, SettingsStreamSrt.self, .init()) rtmp = container.decode(.rtmp, SettingsStreamRtmp.self, .init()) rist = container.decode(.rist, SettingsStreamRist.self, .init()) + whip = container.decode(.whip, SettingsStreamWhip.self, .init()) maxKeyFrameInterval = container.decode(.maxKeyFrameInterval, Int32.self, 2) audioCodec = container.decode(.audioCodec, SettingsStreamAudioCodec.self, .aac) audioBitrate = container.decode(.audioBitrate, Int.self, 128_000) @@ -1374,6 +1394,7 @@ class SettingsStream: Codable, Identifiable, Equatable, ObservableObject, Named new.srt = srt.clone() new.rtmp = rtmp.clone() new.rist = rist.clone() + new.whip = whip.clone() new.maxKeyFrameInterval = maxKeyFrameInterval new.audioCodec = audioCodec new.audioBitrate = audioBitrate @@ -1404,12 +1425,20 @@ class SettingsStream: Codable, Identifiable, Equatable, ObservableObject, Named return .rtmp case "rtmps": return .rtmp + case "http": + return .whip + case "https": + return .whip case "srt": return .srt case "srtla": return .srt case "rist": return .rist + case "whip": + return .whip + case "whips": + return .whip default: return .rtmp } @@ -1421,17 +1450,43 @@ class SettingsStream: Codable, Identifiable, Equatable, ObservableObject, Named return .rtmp case "rtmps": return .rtmps + case "http": + return .whip + case "https": + return .whip case "srt": return .srt case "srtla": return .srtla case "rist": return .rist + case "whip": + return .whip + case "whips": + return .whip default: return .rtmp } } + func resolvedWhipEndpointUrl() -> URL? 
{ + guard getProtocol() == .whip else { + return nil + } + guard var components = URLComponents(string: url) else { + return nil + } + switch components.scheme { + case "whip", "whips": + components.scheme = "https" + return components.url + case "http", "https": + return components.url + default: + return nil + } + } + func protocolString() -> String { if getProtocol() == .srt && isSrtla() { return "SRTLA" diff --git a/Moblin/VideoEffects/Browser/BrowserEffect.swift b/Moblin/VideoEffects/Browser/BrowserEffect.swift index 92f56d1ac..5b11c2c47 100644 --- a/Moblin/VideoEffects/Browser/BrowserEffect.swift +++ b/Moblin/VideoEffects/Browser/BrowserEffect.swift @@ -47,7 +47,7 @@ final class BrowserEffect: VideoEffect { let height: Double private let url: URL private(set) var isLoaded: Bool - private let mode: SettingsWidgetBrowserMode + private let audioAndVideoOnly: Bool private var baseFps: Double private var fps: Double private let snapshotTimer = SimpleTimer(queue: .main) @@ -75,7 +75,7 @@ final class BrowserEffect: VideoEffect { baseFps = Double(widget.baseFps) fps = baseFps isLoaded = false - mode = widget.mode + audioAndVideoOnly = widget.audioAndVideoOnly width = Double(widget.width) height = Double(widget.height) snapshotConfiguration = WKSnapshotConfiguration() @@ -107,7 +107,7 @@ final class BrowserEffect: VideoEffect { } override func isEnabled() -> Bool { - return mode != .audioOnly && snapshot != nil + return snapshot != nil } func sendChatMessage(post: ChatPost) { @@ -189,7 +189,7 @@ final class BrowserEffect: VideoEffect { } private func startTakeSnapshots() { - guard !stopped, mode == .periodicAudioAndVideo else { + guard !stopped, !audioAndVideoOnly else { return } resumeTakeSnapshots() @@ -239,14 +239,12 @@ final class BrowserEffect: VideoEffect { extension BrowserEffect: BrowserEffectServerDelegate { func browserEffectServerVideoPlaying() { fps = 30 - if mode != .audioOnly { - resumeTakeSnapshots() - } + resumeTakeSnapshots() } func 
browserEffectServerVideoEnded() { fps = baseFps - guard mode != .periodicAudioAndVideo else { + guard audioAndVideoOnly else { return } suspendTakeSnapshots() diff --git a/Moblin/VideoEffects/FaceEffect.swift b/Moblin/VideoEffects/FaceEffect.swift index 47fbb455e..fba38419d 100644 --- a/Moblin/VideoEffects/FaceEffect.swift +++ b/Moblin/VideoEffects/FaceEffect.swift @@ -36,7 +36,7 @@ final class FaceEffect: VideoEffect { } override func needsFaceDetections(_: Double) -> VideoEffectDetectionsMode { - if settings.blurFaces || settings.blurBackground || settings.showMouth { + if settings.blurFaces || settings.blurBackground { return .now(nil) } else { return .off diff --git a/Moblin/VideoEffects/Scoreboard/ScoreboardEffect.swift b/Moblin/VideoEffects/Scoreboard/ScoreboardEffect.swift index 57dcb3a23..9882d0aba 100644 --- a/Moblin/VideoEffects/Scoreboard/ScoreboardEffect.swift +++ b/Moblin/VideoEffects/Scoreboard/ScoreboardEffect.swift @@ -1,8 +1,5 @@ import SwiftUI -let scoreboardScoreFontSize = 37.0 -let scoreboardScoreBigFontSize = 45.0 - struct TeamScoreView: View { var score: Int diff --git a/Moblin/VideoEffects/Scoreboard/ScoreboardEffectGenericView.swift b/Moblin/VideoEffects/Scoreboard/ScoreboardEffectGenericView.swift index 25c1d7e24..48b69f039 100644 --- a/Moblin/VideoEffects/Scoreboard/ScoreboardEffectGenericView.swift +++ b/Moblin/VideoEffects/Scoreboard/ScoreboardEffectGenericView.swift @@ -17,7 +17,7 @@ struct ScoreboardEffectGenericView: View { } .padding(5) .background(secondaryBackgroundColor) - HStack(alignment: .center, spacing: 6) { + HStack(alignment: .center, spacing: 18) { VStack(alignment: .leading) { VStack(alignment: .leading) { Spacer(minLength: 0) @@ -36,10 +36,10 @@ struct ScoreboardEffectGenericView: View { TeamScoreView(score: generic.score.home) TeamScoreView(score: generic.score.away) } - .font(.system(size: scoreboardScoreFontSize)) - .frame(width: scoreboardScoreFontSize * 1.33) + .frame(width: 28) + .font(.system(size: 45)) } - 
.padding([.horizontal], 5) + .padding([.leading, .trailing], 5) .background(primaryBackgroundColor) PoweredByMoblinView(backgroundColor: secondaryBackgroundColor) } diff --git a/Moblin/VideoEffects/Scoreboard/ScoreboardEffectPadelView.swift b/Moblin/VideoEffects/Scoreboard/ScoreboardEffectPadelView.swift index ea156ed97..2e264e042 100644 --- a/Moblin/VideoEffects/Scoreboard/ScoreboardEffectPadelView.swift +++ b/Moblin/VideoEffects/Scoreboard/ScoreboardEffectPadelView.swift @@ -59,15 +59,6 @@ struct ScoreboardEffectPadelView: View { let padel: SettingsWidgetPadelScoreboard let players: [SettingsWidgetScoreboardPlayer] - private func scoreFontSize() -> Double { - switch padel.type { - case .doubles: - return scoreboardScoreBigFontSize - case .singles: - return scoreboardScoreFontSize - } - } - var body: some View { let scoreboard = padelScoreboardSettingsToEffect(padel, players) VStack(alignment: .leading, spacing: 0) { @@ -97,11 +88,11 @@ struct ScoreboardEffectPadelView: View { .bold(score.isAwayWin()) } .frame(width: 28) - .font(.system(size: scoreFontSize())) + .font(.system(size: 45)) } + Spacer() } - .padding([.leading], 3) - .padding([.trailing], 18) + .padding([.horizontal], 3) .padding([.top], 3) .background(primaryBackgroundColor) PoweredByMoblinView(backgroundColor: secondaryBackgroundColor) diff --git a/Moblin/View/ControlBar/QuickButton/QuickButtonGoProView.swift b/Moblin/View/ControlBar/QuickButton/QuickButtonGoProView.swift index 93fb3d87a..92f08b174 100644 --- a/Moblin/View/ControlBar/QuickButton/QuickButtonGoProView.swift +++ b/Moblin/View/ControlBar/QuickButton/QuickButtonGoProView.swift @@ -175,18 +175,18 @@ struct QuickButtonGoProView: View { QuickButtonGoProLaunchLiveStreamView( goProState: goProState, goPro: goPro, - height: qrCodeHeight(metrics) + height: metrics.size.height ) .id(0) QuickButtonGoProWifiCredentialsView( goProState: goProState, goPro: goPro, - height: qrCodeHeight(metrics) + height: metrics.size.height ) .id(1) 
QuickButtonGoProRtmpUrlView(goProState: goProState, goPro: goPro, - height: qrCodeHeight(metrics)) + height: metrics.size.height) .id(2) } .containerRelativeFrame(.horizontal, count: 1, spacing: 0) diff --git a/Moblin/View/ControlBar/QuickButton/QuickButtonSceneWidgetsView.swift b/Moblin/View/ControlBar/QuickButton/QuickButtonSceneWidgetsView.swift index 7c36d1653..ce36b3e3c 100644 --- a/Moblin/View/ControlBar/QuickButton/QuickButtonSceneWidgetsView.swift +++ b/Moblin/View/ControlBar/QuickButton/QuickButtonSceneWidgetsView.swift @@ -1,6 +1,521 @@ import AVFoundation import SwiftUI +private struct TimeComponentPickerView: View { + let title: LocalizedStringKey + let range: Range + @Binding var time: Int + + var body: some View { + VStack { + Text(title) + Picker("", selection: $time) { + ForEach(range, id: \.self) { + Text(String($0)) + } + } + .pickerStyle(.wheel) + .frame(width: 100, height: 150) + } + } +} + +private struct TimeButtonView: View { + let text: LocalizedStringKey + let action: () -> Void + + var body: some View { + Button { + action() + } label: { + Text(text) + .frame(width: 100, height: 30) + } + } +} + +private struct TimePickerView: View { + @State private var hours: Int + @State private var minutes: Int + @State private var seconds: Int + private let onSet: (Double) -> Void + private let onCancel: () -> Void + + init(time: Double, onSet: @escaping (Double) -> Void, onCancel: @escaping () -> Void) { + let time = Int(time) + seconds = time % 60 + minutes = (time / 60) % 60 + hours = min(time / 3600, 23) + self.onSet = onSet + self.onCancel = onCancel + } + + var body: some View { + VStack { + HStack { + TimeComponentPickerView(title: "Hours", range: 0 ..< 24, time: $hours) + TimeComponentPickerView(title: "Minutes", range: 0 ..< 60, time: $minutes) + TimeComponentPickerView(title: "Seconds", range: 0 ..< 60, time: $seconds) + } + .padding() + HStack { + TimeButtonView(text: "Set") { + onSet(Double(hours * 3600 + minutes * 60 + seconds)) + } + 
TimeButtonView(text: "Cancel") { + onCancel() + } + } + .buttonStyle(.borderedProminent) + .padding() + } + .padding() + } +} + +struct TimerWidgetView: View { + let name: String + @ObservedObject var timer: SettingsWidgetTextTimer + let index: Int + let textEffects: [TextEffect] + let indented: Bool + @State private var presentingSetTime: Bool = false + + private func updateTextEffect() { + for effect in textEffects { + effect.setEndTime(index: index, endTime: timer.textEffectEndTime()) + } + } + + var body: some View { + HStack { + if indented { + Text("") + Text("").frame(width: iconWidth) + } + VStack(alignment: .leading) { + HStack { + Text(name) + Spacer() + Text(timer.format()) + } + HStack { + Picker("", selection: $timer.delta) { + ForEach([1, 2, 5, 15, 60], id: \.self) { delta in + Text("\(delta) min") + .tag(delta) + } + } + Button { + timer.add(delta: -60 * Double(timer.delta)) + updateTextEffect() + } label: { + Image(systemName: "minus.circle") + .font(.title) + } + Button { + timer.add(delta: 60 * Double(timer.delta)) + updateTextEffect() + } label: { + Image(systemName: "plus.circle") + .font(.title) + } + Button { + presentingSetTime = true + } label: { + Image(systemName: "clock") + .font(.title) + } + .popover(isPresented: $presentingSetTime, arrowEdge: .bottom) { + TimePickerView(time: timer.timeLeft(), + onSet: { + timer.set(time: $0) + updateTextEffect() + presentingSetTime = false + }, + onCancel: { + presentingSetTime = false + }) + } + } + .buttonStyle(.borderless) + } + } + } +} + +struct StopwatchWidgetView: View { + private let name: String + @ObservedObject var stopwatch: SettingsWidgetTextStopwatch + private let index: Int + private let textEffects: [TextEffect] + private var indented: Bool + @State private var presentingSetTime: Bool = false + + init( + name: String, + stopwatch: SettingsWidgetTextStopwatch, + index: Int, + textEffects: [TextEffect], + indented: Bool + ) { + self.name = name + self.stopwatch = stopwatch + self.index = 
index + self.textEffects = textEffects + self.indented = indented + } + + private func updateTextEffect() { + for effect in textEffects { + effect.setStopwatch(index: index, stopwatch: stopwatch.clone()) + } + } + + var body: some View { + HStack { + if indented { + Text("") + Text("").frame(width: iconWidth) + } + VStack(alignment: .leading) { + HStack { + Text(name) + Spacer() + } + HStack { + Spacer() + Button { + presentingSetTime = true + } label: { + Image(systemName: "clock") + .font(.title) + } + .popover(isPresented: $presentingSetTime, arrowEdge: .bottom) { + TimePickerView(time: stopwatch.currentTime(), + onSet: { + stopwatch.playPressedTime = .now + stopwatch.totalElapsed = $0 + updateTextEffect() + presentingSetTime = false + }, + onCancel: { + presentingSetTime = false + }) + } + Button { + stopwatch.totalElapsed = 0.0 + stopwatch.running = false + updateTextEffect() + } label: { + Image(systemName: "arrow.counterclockwise") + .font(.title) + } + Button { + stopwatch.running.toggle() + if stopwatch.running { + stopwatch.playPressedTime = .now + } else { + stopwatch.totalElapsed += stopwatch.playPressedTime.duration(to: .now).seconds + } + updateTextEffect() + } label: { + Image(systemName: stopwatch.running ? "stop" : "play") + .font(.title) + .frame(width: 35) + } + } + .buttonStyle(.borderless) + } + } + } +} + +struct CheckboxWidgetView: View { + private let name: String + private let checkbox: SettingsWidgetTextCheckbox + private let index: Int + private let textEffects: [TextEffect] + private var indented: Bool + @State var image: String + + init( + name: String, + checkbox: SettingsWidgetTextCheckbox, + index: Int, + textEffects: [TextEffect], + indented: Bool + ) { + self.name = name + self.checkbox = checkbox + self.index = index + self.textEffects = textEffects + self.indented = indented + image = checkbox.checked ? 
"checkmark.square" : "square" + } + + private func updateTextEffect() { + for effect in textEffects { + effect.setCheckbox(index: index, checked: checkbox.checked) + } + } + + var body: some View { + HStack { + if indented { + Text("") + Text("").frame(width: iconWidth) + } + Text(name) + Spacer() + Button { + checkbox.checked = !checkbox.checked + image = checkbox.checked ? "checkmark.square" : "square" + updateTextEffect() + } label: { + Image(systemName: image) + .font(.title) + } + } + .buttonStyle(.borderless) + } +} + +struct RatingWidgetView: View { + private let name: String + private let rating: SettingsWidgetTextRating + private let index: Int + private let textEffects: [TextEffect] + private var indented: Bool + @State private var ratingSelection: Int + + init( + name: String, + rating: SettingsWidgetTextRating, + index: Int, + textEffects: [TextEffect], + indented: Bool + ) { + self.name = name + self.rating = rating + self.index = index + self.textEffects = textEffects + self.indented = indented + ratingSelection = rating.rating + } + + private func updateTextEffect() { + for effect in textEffects { + effect.setRating(index: index, rating: rating.rating) + } + } + + var body: some View { + HStack { + if indented { + Text("") + Text("").frame(width: iconWidth) + } + Picker(selection: $ratingSelection) { + ForEach(0 ..< 6) { rating in + Text(String(rating)) + } + } label: { + Text(name) + } + .onChange(of: ratingSelection) { + rating.rating = $0 + updateTextEffect() + } + } + } +} + +struct LapTimesWidgetView: View { + private let name: String + private let lapTimes: SettingsWidgetTextLapTimes + private let index: Int + private let textEffects: [TextEffect] + private var indented: Bool + + init( + name: String, + lapTimes: SettingsWidgetTextLapTimes, + index: Int, + textEffects: [TextEffect], + indented: Bool + ) { + self.name = name + self.lapTimes = lapTimes + self.index = index + self.textEffects = textEffects + self.indented = indented + } + + 
private func updateTextEffect() { + for effect in textEffects { + effect.setLapTimes(index: index, lapTimes: lapTimes.lapTimes) + } + } + + var body: some View { + HStack { + if indented { + Text("") + Text("").frame(width: iconWidth) + } + Text(name) + Spacer() + Button { + lapTimes.currentLapStartTime = nil + lapTimes.lapTimes = [] + updateTextEffect() + } label: { + Image(systemName: "trash") + .font(.title) + } + .padding([.trailing], 10) + Button { + let now = Date().timeIntervalSince1970 + let lastIndex = lapTimes.lapTimes.endIndex - 1 + if lastIndex >= 0, let currentLapStartTime = lapTimes.currentLapStartTime { + lapTimes.lapTimes[lastIndex] = now - currentLapStartTime + } + lapTimes.currentLapStartTime = now + lapTimes.lapTimes.append(0) + updateTextEffect() + } label: { + Image(systemName: "stopwatch") + .font(.title) + } + .padding([.trailing], 10) + Button { + if let currentLapStartTime = lapTimes.currentLapStartTime { + let lastIndex = lapTimes.lapTimes.endIndex - 1 + if lastIndex >= 0 { + let now = Date().timeIntervalSince1970 + lapTimes.lapTimes[lastIndex] = now - currentLapStartTime + } + lapTimes.currentLapStartTime = nil + lapTimes.lapTimes.append(.infinity) + } + updateTextEffect() + } label: { + Image(systemName: "flag.checkered") + .font(.title) + } + } + .buttonStyle(.borderless) + } +} + +struct WheelOfLuckWidgetView: View { + let model: Model + @ObservedObject var widget: SettingsWidget + let effect: WheelOfLuckEffect + let indented: Bool + + var body: some View { + HStack { + if indented { + Text("") + Text("").frame(width: iconWidth) + } + Spacer() + Button { + widget.wheelOfLuck.shuffle() + model.getWheelOfLuckEffect(id: widget.id)?.setSettings(settings: widget.wheelOfLuck) + + } label: { + Image(systemName: "shuffle") + .font(.title) + } + .padding([.trailing], 10) + Button { + effect.spin() + } label: { + Image(systemName: "play") + .font(.title) + } + } + .buttonStyle(.borderless) + } +} + +private struct WidgetTextView: View { + let 
model: Model + @ObservedObject var widget: SettingsWidget + @ObservedObject var text: SettingsWidgetText + + var body: some View { + let textEffects = model.getTextEffects(id: widget.id) + if !textEffects.isEmpty { + let textFormat = loadTextFormat(format: text.formatString) + ForEach(text.timers) { timer in + let index = text.timers.firstIndex(where: { $0 === timer }) ?? 0 + TimerWidgetView( + name: String(localized: "Timer \(index + 1)"), + timer: timer, + index: index, + textEffects: textEffects, + indented: true + ) + } + ForEach(text.stopwatches) { stopwatch in + let index = text.stopwatches.firstIndex(where: { $0 === stopwatch }) ?? 0 + StopwatchWidgetView( + name: String(localized: "Stopwatch \(index + 1)"), + stopwatch: stopwatch, + index: index, + textEffects: textEffects, + indented: true + ) + } + ForEach(text.checkboxes) { checkbox in + let index = text.checkboxes.firstIndex(where: { $0 === checkbox }) ?? 0 + CheckboxWidgetView( + name: textFormat.getCheckboxText(index: index), + checkbox: checkbox, + index: index, + textEffects: textEffects, + indented: true + ) + } + ForEach(text.ratings) { rating in + let index = text.ratings.firstIndex(where: { $0 === rating }) ?? 0 + RatingWidgetView( + name: String(localized: "Rating \(index + 1)"), + rating: rating, + index: index, + textEffects: textEffects, + indented: true + ) + } + ForEach(text.lapTimes) { lapTimes in + let index = text.lapTimes.firstIndex(where: { $0 === lapTimes }) ?? 
0 + LapTimesWidgetView( + name: String(localized: "Lap times \(index + 1)"), + lapTimes: lapTimes, + index: index, + textEffects: textEffects, + indented: true + ) + } + } + } +} + +private struct WidgetWheelOfLuckView: View { + let model: Model + @ObservedObject var widget: SettingsWidget + + var body: some View { + if let effect = model.getWheelOfLuckEffect(id: widget.id) { + WheelOfLuckWidgetView(model: model, widget: widget, effect: effect, indented: true) + } + } +} + private struct WidgetView: View { let model: Model @ObservedObject var database: Database @@ -30,19 +545,13 @@ private struct WidgetView: View { } switch widget.type { case .text: - WidgetTextQuickButtonControlsView(model: model, - widget: widget, - text: widget.text) + WidgetTextView(model: model, widget: widget, text: widget.text) case .wheelOfLuck: - WidgetWheelOfLuckQuickButtonControlsView(model: model, widget: widget) + WidgetWheelOfLuckView(model: model, widget: widget) case .bingoCard: - WidgetBingoCardQuickButtonControlsView(bingoCard: widget.bingoCard) { + BingoCardCompactMarksView(bingoCard: widget.bingoCard) { model.getBingoCardEffect(id: widget.id)?.setSettings(settings: widget.bingoCard) } - case .scoreboard: - WidgetScoreboardQuickButtonControlsView(model: model, - widget: widget, - scoreboard: widget.scoreboard) default: EmptyView() } diff --git a/Moblin/View/Settings/DeepLinkCreator/DeepLinkCreatorQuickButtonsSettingsView.swift b/Moblin/View/Settings/DeepLinkCreator/DeepLinkCreatorQuickButtonsSettingsView.swift index c02ca6d66..47c1c8ef4 100644 --- a/Moblin/View/Settings/DeepLinkCreator/DeepLinkCreatorQuickButtonsSettingsView.swift +++ b/Moblin/View/Settings/DeepLinkCreator/DeepLinkCreatorQuickButtonsSettingsView.swift @@ -5,31 +5,19 @@ private struct DeepLinkCreatorQuickButtonSettingsView: View { @ObservedObject var button: DeepLinkCreatorQuickButton var body: some View { - HStack { - DraggableItemPrefixView() - if let quickButton = model.getQuickButton(type: button.type) { - 
VStack { - Toggle(isOn: $button.enabled) { - IconAndTextView( - image: quickButton.imageOff, - text: quickButton.name, - longDivider: true - ) - } - HStack { - Spacer() - if #available(iOS 17, *) { - Picker("Page", selection: $button.page) { - ForEach(1 ... controlBarPages, id: \.self) { page in - Text(String(page)) - } - } - .fixedSize() - } - } + Toggle(isOn: $button.enabled) { + HStack { + DraggableItemPrefixView() + if let quickButton = model.getQuickButton(type: button.type) { + IconAndTextView( + image: quickButton.imageOff, + text: quickButton.name, + longDivider: true + ) + } else { + Text("Unknown") } - } else { - Text("Unknown") + Spacer() } } } diff --git a/Moblin/View/Settings/DeepLinkCreator/DeepLinkCreatorSettingsView.swift b/Moblin/View/Settings/DeepLinkCreator/DeepLinkCreatorSettingsView.swift index c92a093ea..4eff045b0 100644 --- a/Moblin/View/Settings/DeepLinkCreator/DeepLinkCreatorSettingsView.swift +++ b/Moblin/View/Settings/DeepLinkCreator/DeepLinkCreatorSettingsView.swift @@ -80,7 +80,6 @@ struct DeepLinkCreatorSettingsView: View { settings.quickButtons!.buttons = settings.quickButtons!.buttons ?? .init() let newButton = MoblinSettingsButton(type: button.type) newButton.enabled = true - newButton.page = button.page settings.quickButtons!.buttons!.append(newButton) } } @@ -153,10 +152,12 @@ struct DeepLinkCreatorSettingsView: View { } } Section { - if let image = generateQrCode(from: deepLink) { - QrCodeImageView(image: image, height: qrCodeHeight(metrics)) - } else { - Text("Failed to create QR-code.") + HCenter { + Image(uiImage: generateQrCode(from: deepLink)!) 
+ .resizable() + .interpolation(.none) + .scaledToFit() + .frame(maxHeight: metrics.size.height) } } } else { diff --git a/Moblin/View/Settings/GoPro/GoProSettingsView.swift b/Moblin/View/Settings/GoPro/GoProSettingsView.swift index b4a362544..15ac5c716 100644 --- a/Moblin/View/Settings/GoPro/GoProSettingsView.swift +++ b/Moblin/View/Settings/GoPro/GoProSettingsView.swift @@ -1,14 +1,10 @@ import NetworkExtension import SwiftUI -func qrCodeHeight(_ metrics: GeometryProxy) -> Double { - return metrics.size.width * 0.5 -} - private struct GoProLaunchLiveStreamSettingsView: View { @ObservedObject var goPro: SettingsGoPro @ObservedObject var launchLiveStream: SettingsGoProLaunchLiveStream - @State private var qrCode: UIImage? + @State var qrCode: UIImage? private func generate() { qrCode = GoPro.generateLaunchLiveStream(isHero12Or13: launchLiveStream.isHero12Or13, @@ -39,7 +35,7 @@ private struct GoProLaunchLiveStreamSettingsView: View { } if let qrCode { Section { - QrCodeImageView(image: qrCode, height: qrCodeHeight(metrics)) + QrCodeImageView(image: qrCode, height: metrics.size.height) } } } @@ -118,7 +114,7 @@ private struct GoProWifiCredentialsSettingsView: View { } if let qrCode { Section { - QrCodeImageView(image: qrCode, height: qrCodeHeight(metrics)) + QrCodeImageView(image: qrCode, height: metrics.size.height) } } } @@ -274,7 +270,7 @@ private struct GoProRtmpUrlSettingsView: View { } if let qrCode { Section { - QrCodeImageView(image: qrCode, height: qrCodeHeight(metrics)) + QrCodeImageView(image: qrCode, height: metrics.size.height) } } } diff --git a/Moblin/View/Settings/Ingests/IngestsSettingsView.swift b/Moblin/View/Settings/Ingests/IngestsSettingsView.swift index f0f8b9ff7..14ff283ae 100644 --- a/Moblin/View/Settings/Ingests/IngestsSettingsView.swift +++ b/Moblin/View/Settings/Ingests/IngestsSettingsView.swift @@ -8,9 +8,11 @@ struct IngestsSettingsView: View { Form { Section { RtmpServerSettingsView(rtmpServer: database.rtmpServer) + 
WhipServerSettingsView(whipServer: database.whipServer) SrtlaServerSettingsView(srtlaServer: database.srtlaServer) RistServerSettingsView(ristServer: database.ristServer) RtspClientSettingsView(rtspClient: database.rtspClient) + WhepClientSettingsView(whepClient: database.whepClient) if #available(iOS 26, *), false { NavigationLink { WiFiAwareSettingsView(model: model, wiFiAware: database.wiFiAware) diff --git a/Moblin/View/Settings/Scenes/Widgets/Widget/BingoCard/WidgetBingoCardSettingsView.swift b/Moblin/View/Settings/Scenes/Widgets/Widget/BingoCard/WidgetBingoCardSettingsView.swift index add6f7b3a..b7cfd2aa8 100644 --- a/Moblin/View/Settings/Scenes/Widgets/Widget/BingoCard/WidgetBingoCardSettingsView.swift +++ b/Moblin/View/Settings/Scenes/Widgets/Widget/BingoCard/WidgetBingoCardSettingsView.swift @@ -38,15 +38,15 @@ struct BingoCardMarksView: View { } } -struct WidgetBingoCardQuickButtonControlsView: View { +struct BingoCardCompactMarksView: View { @ObservedObject var bingoCard: SettingsWidgetBingoCard let updateEffect: () -> Void var body: some View { let squaresCountSide = bingoCard.size() - VStack(spacing: 13) { + VStack(spacing: 9) { ForEach(0 ..< squaresCountSide, id: \.self) { row in - HStack(spacing: 13) { + HStack { Spacer() ForEach(0 ..< squaresCountSide, id: \.self) { column in let index = row * squaresCountSide + column @@ -63,7 +63,6 @@ struct WidgetBingoCardQuickButtonControlsView: View { .buttonStyle(.borderless) } else { Image(systemName: "square") - .foregroundColor(.gray) .font(.title) } } diff --git a/Moblin/View/Settings/Scenes/Widgets/Widget/Browser/WidgetBrowserSettingsView.swift b/Moblin/View/Settings/Scenes/Widgets/Widget/Browser/WidgetBrowserSettingsView.swift index 9afffd773..d19f1d23a 100644 --- a/Moblin/View/Settings/Scenes/Widgets/Widget/Browser/WidgetBrowserSettingsView.swift +++ b/Moblin/View/Settings/Scenes/Widgets/Widget/Browser/WidgetBrowserSettingsView.swift @@ -88,25 +88,11 @@ struct WidgetBrowserSettingsView: View { 
keyboardType: .numbersAndPunctuation) } Section { - NavigationLink { - InlinePickerView( - title: "Mode", - onChange: { value in - browser.mode = SettingsWidgetBrowserMode(rawValue: value) ?? .periodicAudioAndVideo - model.resetSelectedScene(changeScene: false) - }, - items: - SettingsWidgetBrowserMode.allCases.map { .init(id: $0.rawValue, text: $0.toString()) }, - selectedId: browser.mode.rawValue - ) - } label: { - HStack { - Text("Mode") - Spacer() - Text(browser.mode.toString()).foregroundStyle(.gray) + Toggle("Audio and video only", isOn: $browser.audioAndVideoOnly) + .onChange(of: browser.audioAndVideoOnly) { _ in + model.resetSelectedScene(changeScene: false) } - } - if browser.mode == .periodicAudioAndVideo { + if !browser.audioAndVideoOnly { HStack { Text("Base FPS") SliderView( @@ -121,18 +107,11 @@ struct WidgetBrowserSettingsView: View { } } } footer: { - VStack(alignment: .leading) { - Text(""" - When \"Audio and video only\" mode is selected, images, text, GIFs etc. \ - will only be shown when a video (.mp4/.mov) is playing, reducing overall \ - energy consumption. - """) - Text("") - Text(""" - When \"Audio only\" mode is selected, no video will be rendered at all. \ - Only audio will play. - """) - } + Text(""" + When \"Audio and video only\" is enabled, images, text, GIFs etc. \ + will only be shown when a video (.mp4/.mov) is playing, reducing overall \ + energy consumption. 
+ """) } Section { Toggle("Moblin access", isOn: $browser.moblinAccess) diff --git a/Moblin/View/Settings/Scenes/Widgets/Widget/Scoreboard/WidgetScoreboardGenericSettingsView.swift b/Moblin/View/Settings/Scenes/Widgets/Widget/Scoreboard/WidgetScoreboardGenericSettingsView.swift index c7344a9a2..dde1b9c43 100644 --- a/Moblin/View/Settings/Scenes/Widgets/Widget/Scoreboard/WidgetScoreboardGenericSettingsView.swift +++ b/Moblin/View/Settings/Scenes/Widgets/Widget/Scoreboard/WidgetScoreboardGenericSettingsView.swift @@ -1,184 +1,26 @@ import SwiftUI -private struct TimePickerView: View { - let model: Model - let widget: SettingsWidget - var clock: SettingsWidgetScoreboardClock - @Binding var presenting: Bool - @State private var minutes: Int = 0 - @State private var seconds: Int = 0 - - var body: some View { - VStack { - HStack { - TimeComponentPickerView(title: "Minutes", range: 0 ..< 120, time: $minutes) - TimeComponentPickerView(title: "Seconds", range: 0 ..< 60, time: $seconds) - } - .padding() - HStack { - TimeButtonView(text: "Set") { - model.handleUpdateGenericScoreboard(action: .init( - id: widget.id, - action: .setClock(minutes: minutes, seconds: seconds) - )) - presenting = false - } - TimeButtonView(text: "Cancel") { - presenting = false - } - } - .buttonStyle(.borderedProminent) - .padding() - } - .padding() - .onAppear { - minutes = clock.minutes - seconds = clock.seconds - } - } -} - -private struct ScoreboardSetClockButtonView: View { - let model: Model - let widget: SettingsWidget - @ObservedObject var clock: SettingsWidgetScoreboardClock - @State private var presenting: Bool = false - - var body: some View { - Button { - presenting = true - } label: { - Image(systemName: "clock") - .font(.title) - } - .buttonStyle(.borderless) - .popover(isPresented: $presenting) { - TimePickerView(model: model, - widget: widget, - clock: clock, - presenting: $presenting) - } - } -} - -private struct ScoreboardStartStopClockButtonView: View { - @ObservedObject var 
clock: SettingsWidgetScoreboardClock - - var body: some View { - Button { - clock.isStopped.toggle() - } label: { - Image(systemName: clock.isStopped ? "play" : "stop") - .font(.title) - } - .buttonStyle(.borderless) - } -} - -struct ScoreboardUndoButtonView: View { - let action: () -> Void - - var body: some View { - Button { - action() - } label: { - Image(systemName: "arrow.uturn.backward") - .font(.title) - } - .buttonStyle(.borderless) - } -} - -struct ScoreboardIncrementButtonView: View { - let action: () -> Void - - var body: some View { - Button { - action() - } label: { - Image(systemName: "plus") - .font(.title) - } - .buttonStyle(.borderless) - } -} - -struct ScoreboardResetScoreButtonView: View { - let action: () -> Void - @State private var presentingResetConfirimation = false - - var body: some View { - Button { - presentingResetConfirimation = true - } label: { - Image(systemName: "trash") - .font(.title) - } - .buttonStyle(.borderless) - .tint(.red) - .confirmationDialog("", isPresented: $presentingResetConfirimation) { - Button("Reset score", role: .destructive) { - action() - } - } - } -} - -struct WidgetScoreboardGenericQuickButtonControlsView: View { - let model: Model - let widget: SettingsWidget - - var body: some View { - HStack(spacing: 13) { - Spacer() - VStack(spacing: 13) { - ScoreboardStartStopClockButtonView(clock: widget.scoreboard.generic.clock) - ScoreboardSetClockButtonView(model: model, - widget: widget, - clock: widget.scoreboard.generic.clock) - } - Divider() - VStack(spacing: 13) { - ScoreboardUndoButtonView { - model.handleUpdateGenericScoreboard(action: .init(id: widget.id, action: .undo)) - } - ScoreboardResetScoreButtonView { - model.handleUpdateGenericScoreboard(action: .init(id: widget.id, action: .reset)) - } - } - VStack(spacing: 13) { - ScoreboardIncrementButtonView { - model.handleUpdateGenericScoreboard(action: .init(id: widget.id, action: .incrementHome)) - } - ScoreboardIncrementButtonView { - 
model.handleUpdateGenericScoreboard(action: .init(id: widget.id, action: .incrementAway)) - } - } - } - } -} - struct WidgetScoreboardGenericGeneralSettingsView: View { + let model: Model @ObservedObject var widget: SettingsWidget let scoreboard: SettingsWidgetScoreboard @ObservedObject var generic: SettingsWidgetGenericScoreboard - let updated: () -> Void var body: some View { TextEditNavigationView(title: String(localized: "Title"), value: generic.title) { title in generic.title = title } .onChange(of: generic.title) { _ in - updated() + model.resetSelectedScene(changeScene: false, attachCamera: false) } - ScoreboardColorsView(scoreboard: scoreboard, updated: updated) + ScoreboardColorsView(model: model, widget: widget, scoreboard: scoreboard) } } struct WidgetScoreboardGenericSettingsView: View { + let model: Model @ObservedObject var generic: SettingsWidgetGenericScoreboard @ObservedObject var clock: SettingsWidgetScoreboardClock - let updated: () -> Void private func isValidClockMaximum(value: String) -> String? 
{ guard let maximum = Int(value) else { @@ -213,13 +55,13 @@ struct WidgetScoreboardGenericSettingsView: View { generic.home = home } .onChange(of: generic.home) { _ in - updated() + model.resetSelectedScene(changeScene: false, attachCamera: false) } TextEditNavigationView(title: String(localized: "Away"), value: generic.away) { away in generic.away = away } .onChange(of: generic.away) { _ in - updated() + model.resetSelectedScene(changeScene: false, attachCamera: false) } } header: { Text("Teams") @@ -232,7 +74,7 @@ struct WidgetScoreboardGenericSettingsView: View { valueFormat: formatMaximum) .onChange(of: clock.maximum) { _ in clock.reset() - updated() + model.resetSelectedScene(changeScene: false, attachCamera: false) } Picker("Direction", selection: $clock.direction) { ForEach(SettingsWidgetGenericScoreboardClockDirection.allCases, id: \.self) { direction in @@ -241,7 +83,7 @@ struct WidgetScoreboardGenericSettingsView: View { } .onChange(of: clock.direction) { _ in clock.reset() - updated() + model.resetSelectedScene(changeScene: false, attachCamera: false) } } header: { Text("Clock") diff --git a/Moblin/View/Settings/Scenes/Widgets/Widget/Scoreboard/WidgetScoreboardModularSettingsView.swift b/Moblin/View/Settings/Scenes/Widgets/Widget/Scoreboard/WidgetScoreboardModularSettingsView.swift index bf1c5d82c..80c2bc8cc 100644 --- a/Moblin/View/Settings/Scenes/Widgets/Widget/Scoreboard/WidgetScoreboardModularSettingsView.swift +++ b/Moblin/View/Settings/Scenes/Widgets/Widget/Scoreboard/WidgetScoreboardModularSettingsView.swift @@ -1,9 +1,14 @@ import SwiftUI private struct TeamView: View { + let model: Model + let widget: SettingsWidget let side: String @ObservedObject var team: SettingsWidgetModularScoreboardTeam - let updated: () -> Void + + private func updateEffect() { + model.updateScoreboardEffect(widget: widget) + } var body: some View { NavigationLink { @@ -11,21 +16,24 @@ private struct TeamView: View { Section { TextEditNavigationView(title: 
String(localized: "Name"), value: team.name) { team.name = $0 - updated() + model.remoteControlScoreboardUpdate() + model.sceneUpdated() } ColorPicker("Text", selection: $team.textColorColor, supportsOpacity: false) .onChange(of: team.textColorColor) { if let rgb = $0.toRgb() { team.textColor = rgb } - updated() + model.remoteControlScoreboardUpdate() + updateEffect() } ColorPicker("Background", selection: $team.backgroundColorColor, supportsOpacity: false) .onChange(of: team.backgroundColorColor) { if let rgb = $0.toRgb() { team.backgroundColor = rgb } - updated() + model.remoteControlScoreboardUpdate() + updateEffect() } } } @@ -42,9 +50,10 @@ private struct TeamView: View { } struct WidgetScoreboardModularSettingsView: View { + let model: Model + let widget: SettingsWidget @ObservedObject var modular: SettingsWidgetModularScoreboard @ObservedObject var clock: SettingsWidgetScoreboardClock - let updated: () -> Void private func isValidClockMaximum(value: String) -> String? { guard let maximum = Int(value) else { @@ -75,8 +84,8 @@ struct WidgetScoreboardModularSettingsView: View { var body: some View { Section { - TeamView(side: String(localized: "Home"), team: modular.home, updated: updated) - TeamView(side: String(localized: "Away"), team: modular.away, updated: updated) + TeamView(model: model, widget: widget, side: String(localized: "Home"), team: modular.home) + TeamView(model: model, widget: widget, side: String(localized: "Away"), team: modular.away) } header: { Text("Teams") } @@ -88,7 +97,8 @@ struct WidgetScoreboardModularSettingsView: View { valueFormat: formatMaximum) .onChange(of: clock.maximum) { _ in clock.reset() - updated() + model.remoteControlScoreboardUpdate() + model.sceneUpdated() } Picker("Direction", selection: $clock.direction) { ForEach(SettingsWidgetGenericScoreboardClockDirection.allCases, id: \.self) { direction in @@ -97,7 +107,8 @@ struct WidgetScoreboardModularSettingsView: View { } .onChange(of: clock.direction) { _ in 
clock.reset() - updated() + model.remoteControlScoreboardUpdate() + model.sceneUpdated() } } header: { Text("Clock") @@ -106,8 +117,13 @@ struct WidgetScoreboardModularSettingsView: View { } struct WidgetScoreboardModularGeneralSettingsView: View { + let model: Model + let widget: SettingsWidget @ObservedObject var modular: SettingsWidgetModularScoreboard - let updated: () -> Void + + private func updateEffect() { + model.updateScoreboardEffect(widget: widget) + } var body: some View { NavigationLink("Layout") { @@ -119,7 +135,8 @@ struct WidgetScoreboardModularGeneralSettingsView: View { } } .onChange(of: modular.layout) { _ in - updated() + model.updateScoreboardEffect(widget: widget) + model.remoteControlScoreboardUpdate() } } Section { @@ -127,7 +144,7 @@ struct WidgetScoreboardModularGeneralSettingsView: View { Text("Width") Slider(value: $modular.width, in: 100 ... 1000) .onChange(of: modular.width) { _ in - updated() + updateEffect() } Text(String(Int(modular.width))) .frame(width: 35) @@ -136,7 +153,7 @@ struct WidgetScoreboardModularGeneralSettingsView: View { Text("Height") Slider(value: $modular.rowHeight, in: 10 ... 
150) .onChange(of: modular.rowHeight) { _ in - updated() + updateEffect() } Text(String(Int(modular.rowHeight))) .frame(width: 35) @@ -145,19 +162,19 @@ struct WidgetScoreboardModularGeneralSettingsView: View { Section { Toggle("Title", isOn: $modular.showTitle) .onChange(of: modular.showTitle) { _ in - updated() - } - Toggle("More stats", isOn: $modular.showMoreStats) - .onChange(of: modular.showMoreStats) { _ in - updated() + updateEffect() } Toggle("Info box", isOn: $modular.showGlobalStatsBlock) .onChange(of: modular.showGlobalStatsBlock) { _ in - updated() + updateEffect() + } + Toggle("More stats", isOn: $modular.showMoreStats) + .onChange(of: modular.showMoreStats) { _ in + updateEffect() } Toggle("Bold", isOn: $modular.isBold) .onChange(of: modular.isBold) { _ in - updated() + updateEffect() } } } diff --git a/Moblin/View/Settings/Scenes/Widgets/Widget/Scoreboard/WidgetScoreboardPadelSettingsView.swift b/Moblin/View/Settings/Scenes/Widgets/Widget/Scoreboard/WidgetScoreboardPadelSettingsView.swift index b5aeb32d1..f5be3454e 100644 --- a/Moblin/View/Settings/Scenes/Widgets/Widget/Scoreboard/WidgetScoreboardPadelSettingsView.swift +++ b/Moblin/View/Settings/Scenes/Widgets/Widget/Scoreboard/WidgetScoreboardPadelSettingsView.swift @@ -8,7 +8,7 @@ private struct PlayersPlayerView: View { var body: some View { NameEditView(name: $player.name, existingNames: database.scoreboardPlayers) .onChange(of: player.name) { _ in - model.sceneUpdated() + model.resetSelectedScene(changeScene: false, attachCamera: false) model.sendScoreboardPlayersToWatch() } } @@ -17,7 +17,6 @@ private struct PlayersPlayerView: View { private struct PlayersView: View { @EnvironmentObject var model: Model @ObservedObject var database: Database - let updated: () -> Void var body: some View { Section { @@ -27,12 +26,12 @@ private struct PlayersView: View { } .onMove { froms, to in database.scoreboardPlayers.move(fromOffsets: froms, toOffset: to) - updated() + model.resetSelectedScene(changeScene: 
false, attachCamera: false) model.sendScoreboardPlayersToWatch() } .onDelete { offsets in database.scoreboardPlayers.remove(atOffsets: offsets) - updated() + model.resetSelectedScene(changeScene: false, attachCamera: false) model.sendScoreboardPlayersToWatch() } } @@ -71,40 +70,11 @@ private struct PlayerView: View { } } -struct WidgetScoreboardPadelQuickButtonControlsView: View { - let model: Model - let widget: SettingsWidget - - var body: some View { - VStack(spacing: 13) { - HStack(spacing: 13) { - Spacer() - ScoreboardUndoButtonView { - model.handleUpdatePadelScoreboard(action: .init(id: widget.id, action: .undo)) - } - ScoreboardIncrementButtonView { - model.handleUpdatePadelScoreboard(action: .init(id: widget.id, action: .incrementHome)) - } - } - HStack(spacing: 13) { - Spacer() - ScoreboardResetScoreButtonView { - model.handleUpdatePadelScoreboard(action: .init(id: widget.id, action: .reset)) - } - ScoreboardIncrementButtonView { - model.handleUpdatePadelScoreboard(action: .init(id: widget.id, action: .incrementAway)) - } - } - } - .font(.title) - } -} - struct WidgetScoreboardPadelGeneralSettingsView: View { + let model: Model @ObservedObject var widget: SettingsWidget @ObservedObject var scoreboard: SettingsWidgetScoreboard @ObservedObject var padel: SettingsWidgetPadelScoreboard - let updated: () -> Void var body: some View { HStack { @@ -117,28 +87,27 @@ struct WidgetScoreboardPadelGeneralSettingsView: View { } } .onChange(of: padel.type) { _ in - updated() + model.resetSelectedScene(changeScene: false, attachCamera: false) } } - ScoreboardColorsView(scoreboard: scoreboard, updated: updated) + ScoreboardColorsView(model: model, widget: widget, scoreboard: scoreboard) } } struct WidgetScoreboardPadelSettingsView: View { let model: Model @ObservedObject var padel: SettingsWidgetPadelScoreboard - let updated: () -> Void var body: some View { Section { PlayerView(playerId: $padel.homePlayer1) .onChange(of: padel.homePlayer1) { _ in - updated() + 
model.resetSelectedScene(changeScene: false, attachCamera: false) } if padel.type == .doubles { PlayerView(playerId: $padel.homePlayer2) .onChange(of: padel.homePlayer2) { _ in - updated() + model.resetSelectedScene(changeScene: false, attachCamera: false) } } } header: { @@ -147,17 +116,17 @@ struct WidgetScoreboardPadelSettingsView: View { Section { PlayerView(playerId: $padel.awayPlayer1) .onChange(of: padel.awayPlayer1) { _ in - updated() + model.resetSelectedScene(changeScene: false, attachCamera: false) } if padel.type == .doubles { PlayerView(playerId: $padel.awayPlayer2) .onChange(of: padel.awayPlayer2) { _ in - updated() + model.resetSelectedScene(changeScene: false, attachCamera: false) } } } header: { Text("Away") } - PlayersView(database: model.database, updated: updated) + PlayersView(database: model.database) } } diff --git a/Moblin/View/Settings/Scenes/Widgets/Widget/Scoreboard/WidgetScoreboardSettingsView.swift b/Moblin/View/Settings/Scenes/Widgets/Widget/Scoreboard/WidgetScoreboardSettingsView.swift index a87246c80..816b5164d 100644 --- a/Moblin/View/Settings/Scenes/Widgets/Widget/Scoreboard/WidgetScoreboardSettingsView.swift +++ b/Moblin/View/Settings/Scenes/Widgets/Widget/Scoreboard/WidgetScoreboardSettingsView.swift @@ -1,25 +1,13 @@ import SwiftUI -struct WidgetScoreboardQuickButtonControlsView: View { +struct ScoreboardColorsView: View { let model: Model let widget: SettingsWidget @ObservedObject var scoreboard: SettingsWidgetScoreboard - var body: some View { - switch scoreboard.sport { - case .generic: - WidgetScoreboardGenericQuickButtonControlsView(model: model, widget: widget) - case .padel: - WidgetScoreboardPadelQuickButtonControlsView(model: model, widget: widget) - default: - EmptyView() - } + private func updateEffect() { + model.updateScoreboardEffect(widget: widget) } -} - -struct ScoreboardColorsView: View { - @ObservedObject var scoreboard: SettingsWidgetScoreboard - let updated: () -> Void var body: some View { NavigationLink { 
@@ -30,7 +18,7 @@ struct ScoreboardColorsView: View { if let color = scoreboard.textColorColor.toRgb() { scoreboard.textColor = color } - updated() + updateEffect() } ColorPicker("Primary background", selection: $scoreboard.primaryBackgroundColorColor, @@ -39,7 +27,7 @@ struct ScoreboardColorsView: View { if let color = scoreboard.primaryBackgroundColorColor.toRgb() { scoreboard.primaryBackgroundColor = color } - updated() + updateEffect() } ColorPicker("Secondary background", selection: $scoreboard.secondaryBackgroundColorColor, @@ -48,13 +36,13 @@ struct ScoreboardColorsView: View { if let color = scoreboard.secondaryBackgroundColorColor.toRgb() { scoreboard.secondaryBackgroundColor = color } - updated() + updateEffect() } } Section { TextButtonView("Reset") { scoreboard.resetColors() - updated() + updateEffect() } } } @@ -71,24 +59,6 @@ struct WidgetScoreboardSettingsView: View { @ObservedObject var scoreboard: SettingsWidgetScoreboard @ObservedObject var web: SettingsRemoteControlWeb - private func updated() { - switch scoreboard.sport { - case .generic: - model.sendUpdateGenericScoreboardToWatch(id: widget.id, generic: scoreboard.generic) - case .padel: - model.sendUpdatePadelScoreboardToWatch(id: widget.id, padel: scoreboard.padel) - default: - break - } - model.remoteControlScoreboardUpdate(scoreboard: scoreboard) - model.getScoreboardEffect(id: widget.id)? 
- .update( - scoreboard: scoreboard, - config: model.getModularScoreboardConfig(scoreboard: scoreboard), - players: model.database.scoreboardPlayers - ) - } - var body: some View { Section { Picker("Sport", selection: $scoreboard.sport) { @@ -98,22 +68,24 @@ struct WidgetScoreboardSettingsView: View { } .onChange(of: scoreboard.sport) { _ in scoreboard.modular.config = nil - updated() + model.remoteControlScoreboardUpdate() + model.resetSelectedScene(changeScene: false, attachCamera: false) } switch scoreboard.sport { case .padel: - WidgetScoreboardPadelGeneralSettingsView(widget: widget, + WidgetScoreboardPadelGeneralSettingsView(model: model, + widget: widget, scoreboard: scoreboard, - padel: scoreboard.padel, - updated: updated) + padel: scoreboard.padel) case .generic: - WidgetScoreboardGenericGeneralSettingsView(widget: widget, + WidgetScoreboardGenericGeneralSettingsView(model: model, + widget: widget, scoreboard: scoreboard, - generic: scoreboard.generic, - updated: updated) + generic: scoreboard.generic) default: - WidgetScoreboardModularGeneralSettingsView(modular: scoreboard.modular, - updated: updated) + WidgetScoreboardModularGeneralSettingsView(model: model, + widget: widget, + modular: scoreboard.modular) } } Section { @@ -137,15 +109,16 @@ struct WidgetScoreboardSettingsView: View { } switch scoreboard.sport { case .padel: - WidgetScoreboardPadelSettingsView(model: model, padel: scoreboard.padel, updated: updated) + WidgetScoreboardPadelSettingsView(model: model, padel: scoreboard.padel) case .generic: - WidgetScoreboardGenericSettingsView(generic: scoreboard.generic, - clock: scoreboard.generic.clock, - updated: updated) + WidgetScoreboardGenericSettingsView(model: model, + generic: scoreboard.generic, + clock: scoreboard.generic.clock) default: - WidgetScoreboardModularSettingsView(modular: scoreboard.modular, - clock: scoreboard.modular.clock, - updated: updated) + WidgetScoreboardModularSettingsView(model: model, + widget: widget, + modular: 
scoreboard.modular, + clock: scoreboard.modular.clock) } } } diff --git a/Moblin/View/Settings/Scenes/Widgets/Widget/Text/WidgetTextSettingsView.swift b/Moblin/View/Settings/Scenes/Widgets/Widget/Text/WidgetTextSettingsView.swift index 0674827cb..c4a01597f 100644 --- a/Moblin/View/Settings/Scenes/Widgets/Widget/Text/WidgetTextSettingsView.swift +++ b/Moblin/View/Settings/Scenes/Widgets/Widget/Text/WidgetTextSettingsView.swift @@ -93,6 +93,37 @@ private struct SuggestionView: View { } } +private struct TextWidgetSuggestionsInnerView: View { + @Environment(\.dismiss) var dismiss + @Binding var text: String + + var body: some View { + Form { + Section { + ForEach(suggestions) { suggestion in + SuggestionView(suggestion: suggestion, text: $text) { + dismiss() + } + .tag(suggestion.id) + } + } + } + .navigationTitle("Suggestions") + } +} + +struct TextWidgetSuggestionsView: View { + @Binding var text: String + + var body: some View { + NavigationLink { + TextWidgetSuggestionsInnerView(text: $text) + } label: { + Text("Suggestions") + } + } +} + private struct VariableView: View { @EnvironmentObject var model: Model let title: String @@ -192,432 +223,6 @@ private struct SubtitlesWithLanguageView: View { } } -struct TimeComponentPickerView: View { - let title: LocalizedStringKey - let range: Range - @Binding var time: Int - - var body: some View { - VStack { - Text(title) - Picker("", selection: $time) { - ForEach(range, id: \.self) { - Text(String($0)) - } - } - .pickerStyle(.wheel) - .frame(width: 100, height: 150) - } - } -} - -struct TimeButtonView: View { - let text: LocalizedStringKey - let action: () -> Void - - var body: some View { - Button { - action() - } label: { - Text(text) - .frame(width: 100, height: 30) - } - } -} - -private struct TimePickerView: View { - @State private var hours: Int - @State private var minutes: Int - @State private var seconds: Int - private let onSet: (Double) -> Void - private let onCancel: () -> Void - - init(time: Double, onSet: 
@escaping (Double) -> Void, onCancel: @escaping () -> Void) { - let time = Int(time) - seconds = time % 60 - minutes = (time / 60) % 60 - hours = min(time / 3600, 23) - self.onSet = onSet - self.onCancel = onCancel - } - - var body: some View { - VStack { - HStack { - TimeComponentPickerView(title: "Hours", range: 0 ..< 24, time: $hours) - TimeComponentPickerView(title: "Minutes", range: 0 ..< 60, time: $minutes) - TimeComponentPickerView(title: "Seconds", range: 0 ..< 60, time: $seconds) - } - .padding() - HStack { - TimeButtonView(text: "Set") { - onSet(Double(hours * 3600 + minutes * 60 + seconds)) - } - TimeButtonView(text: "Cancel") { - onCancel() - } - } - .buttonStyle(.borderedProminent) - .padding() - } - .padding() - } -} - -private struct TimerWidgetView: View { - let name: String - @ObservedObject var timer: SettingsWidgetTextTimer - let index: Int - let textEffects: [TextEffect] - let indented: Bool - @State private var presentingSetTime: Bool = false - - private func updateTextEffect() { - for effect in textEffects { - effect.setEndTime(index: index, endTime: timer.textEffectEndTime()) - } - } - - var body: some View { - HStack { - if indented { - Text("") - Text("").frame(width: iconWidth) - } - VStack(alignment: .leading) { - HStack { - Text(name) - Spacer() - Text(timer.format()) - } - HStack(spacing: 13) { - Picker("", selection: $timer.delta) { - ForEach([1, 2, 5, 15, 60], id: \.self) { delta in - Text("\(delta) min") - .tag(delta) - } - } - Button { - timer.add(delta: -60 * Double(timer.delta)) - updateTextEffect() - } label: { - Image(systemName: "minus") - .font(.title) - } - Button { - timer.add(delta: 60 * Double(timer.delta)) - updateTextEffect() - } label: { - Image(systemName: "plus") - .font(.title) - } - Button { - presentingSetTime = true - } label: { - Image(systemName: "clock") - .font(.title) - } - .popover(isPresented: $presentingSetTime) { - TimePickerView(time: timer.timeLeft(), - onSet: { - timer.set(time: $0) - 
updateTextEffect() - presentingSetTime = false - }, - onCancel: { - presentingSetTime = false - }) - } - } - .buttonStyle(.borderless) - } - } - } -} - -private struct StopwatchWidgetView: View { - private let name: String - @ObservedObject var stopwatch: SettingsWidgetTextStopwatch - private let index: Int - private let textEffects: [TextEffect] - private var indented: Bool - @State private var presentingSetTime: Bool = false - - init( - name: String, - stopwatch: SettingsWidgetTextStopwatch, - index: Int, - textEffects: [TextEffect], - indented: Bool - ) { - self.name = name - self.stopwatch = stopwatch - self.index = index - self.textEffects = textEffects - self.indented = indented - } - - private func updateTextEffect() { - for effect in textEffects { - effect.setStopwatch(index: index, stopwatch: stopwatch.clone()) - } - } - - var body: some View { - HStack { - if indented { - Text("") - Text("").frame(width: iconWidth) - } - VStack(alignment: .leading) { - HStack { - Text(name) - Spacer() - } - HStack(spacing: 13) { - Spacer() - Button { - presentingSetTime = true - } label: { - Image(systemName: "clock") - .font(.title) - } - .popover(isPresented: $presentingSetTime) { - TimePickerView(time: stopwatch.currentTime(), - onSet: { - stopwatch.playPressedTime = .now - stopwatch.totalElapsed = $0 - updateTextEffect() - presentingSetTime = false - }, - onCancel: { - presentingSetTime = false - }) - } - Button { - stopwatch.totalElapsed = 0.0 - stopwatch.running = false - updateTextEffect() - } label: { - Image(systemName: "arrow.counterclockwise") - .font(.title) - } - Button { - stopwatch.running.toggle() - if stopwatch.running { - stopwatch.playPressedTime = .now - } else { - stopwatch.totalElapsed += stopwatch.playPressedTime.duration(to: .now).seconds - } - updateTextEffect() - } label: { - Image(systemName: stopwatch.running ? 
"stop" : "play") - .font(.title) - .frame(width: 35) - } - } - .buttonStyle(.borderless) - } - } - } -} - -private struct CheckboxWidgetView: View { - private let name: String - private let checkbox: SettingsWidgetTextCheckbox - private let index: Int - private let textEffects: [TextEffect] - private var indented: Bool - @State var image: String - - init( - name: String, - checkbox: SettingsWidgetTextCheckbox, - index: Int, - textEffects: [TextEffect], - indented: Bool - ) { - self.name = name - self.checkbox = checkbox - self.index = index - self.textEffects = textEffects - self.indented = indented - image = checkbox.checked ? "checkmark.square" : "square" - } - - private func updateTextEffect() { - for effect in textEffects { - effect.setCheckbox(index: index, checked: checkbox.checked) - } - } - - var body: some View { - HStack { - if indented { - Text("") - Text("").frame(width: iconWidth) - } - Text(name) - Spacer() - Button { - checkbox.checked = !checkbox.checked - image = checkbox.checked ? 
"checkmark.square" : "square" - updateTextEffect() - } label: { - Image(systemName: image) - .font(.title) - } - } - .buttonStyle(.borderless) - } -} - -private struct RatingWidgetView: View { - private let name: String - private let rating: SettingsWidgetTextRating - private let index: Int - private let textEffects: [TextEffect] - private var indented: Bool - @State private var ratingSelection: Int - - init( - name: String, - rating: SettingsWidgetTextRating, - index: Int, - textEffects: [TextEffect], - indented: Bool - ) { - self.name = name - self.rating = rating - self.index = index - self.textEffects = textEffects - self.indented = indented - ratingSelection = rating.rating - } - - private func updateTextEffect() { - for effect in textEffects { - effect.setRating(index: index, rating: rating.rating) - } - } - - var body: some View { - HStack { - if indented { - Text("") - Text("").frame(width: iconWidth) - } - Picker(selection: $ratingSelection) { - ForEach(0 ..< 6) { rating in - Text(String(rating)) - } - } label: { - Text(name) - } - .onChange(of: ratingSelection) { - rating.rating = $0 - updateTextEffect() - } - } - } -} - -private struct LapTimesWidgetView: View { - private let name: String - private let lapTimes: SettingsWidgetTextLapTimes - private let index: Int - private let textEffects: [TextEffect] - private var indented: Bool - - init( - name: String, - lapTimes: SettingsWidgetTextLapTimes, - index: Int, - textEffects: [TextEffect], - indented: Bool - ) { - self.name = name - self.lapTimes = lapTimes - self.index = index - self.textEffects = textEffects - self.indented = indented - } - - private func updateTextEffect() { - for effect in textEffects { - effect.setLapTimes(index: index, lapTimes: lapTimes.lapTimes) - } - } - - var body: some View { - HStack(spacing: 13) { - if indented { - Text("") - Text("").frame(width: iconWidth) - } - Text(name) - Spacer() - Button { - lapTimes.currentLapStartTime = nil - lapTimes.lapTimes = [] - 
updateTextEffect() - } label: { - Image(systemName: "trash") - .font(.title) - .tint(.red) - } - Button { - let now = Date().timeIntervalSince1970 - let lastIndex = lapTimes.lapTimes.endIndex - 1 - if lastIndex >= 0, let currentLapStartTime = lapTimes.currentLapStartTime { - lapTimes.lapTimes[lastIndex] = now - currentLapStartTime - } - lapTimes.currentLapStartTime = now - lapTimes.lapTimes.append(0) - updateTextEffect() - } label: { - Image(systemName: "stopwatch") - .font(.title) - } - Button { - if let currentLapStartTime = lapTimes.currentLapStartTime { - let lastIndex = lapTimes.lapTimes.endIndex - 1 - if lastIndex >= 0 { - let now = Date().timeIntervalSince1970 - lapTimes.lapTimes[lastIndex] = now - currentLapStartTime - } - lapTimes.currentLapStartTime = nil - lapTimes.lapTimes.append(.infinity) - } - updateTextEffect() - } label: { - Image(systemName: "flag.checkered") - .font(.title) - } - } - .buttonStyle(.borderless) - } -} - -private struct TextWidgetSuggestionsInnerView: View { - @Environment(\.dismiss) var dismiss - @Binding var text: String - - var body: some View { - Form { - Section { - ForEach(suggestions) { suggestion in - SuggestionView(suggestion: suggestion, text: $text) { - dismiss() - } - .tag(suggestion.id) - } - } - } - .navigationTitle("Suggestions") - } -} - private struct GeneralVariablesView: View { @Binding var value: String @@ -917,6 +522,23 @@ private struct DebugVariablesView: View { } } +struct TextWidgetTextView: View { + @Binding var value: String + @FocusState private var editingText: Bool + + var body: some View { + Section { + MultiLineTextFieldView(value: $value) + .keyboardType(.default) + .textInputAutocapitalization(.never) + .autocorrectionDisabled() + .focused($editingText) + } footer: { + MultiLineTextFieldDoneButtonView(editingText: $editingText) + } + } +} + private struct TextSelectionView: View { @EnvironmentObject var model: Model @Environment(\.dismiss) var dismiss @@ -951,98 +573,6 @@ private struct 
TextSelectionView: View { } } -struct TextWidgetTextView: View { - @Binding var value: String - @FocusState private var editingText: Bool - - var body: some View { - Section { - MultiLineTextFieldView(value: $value) - .keyboardType(.default) - .textInputAutocapitalization(.never) - .autocorrectionDisabled() - .focused($editingText) - } footer: { - MultiLineTextFieldDoneButtonView(editingText: $editingText) - } - } -} - -struct WidgetTextQuickButtonControlsView: View { - let model: Model - @ObservedObject var widget: SettingsWidget - @ObservedObject var text: SettingsWidgetText - - var body: some View { - let textEffects = model.getTextEffects(id: widget.id) - if !textEffects.isEmpty { - let textFormat = loadTextFormat(format: text.formatString) - ForEach(text.timers) { timer in - let index = text.timers.firstIndex(where: { $0 === timer }) ?? 0 - TimerWidgetView( - name: String(localized: "Timer \(index + 1)"), - timer: timer, - index: index, - textEffects: textEffects, - indented: true - ) - } - ForEach(text.stopwatches) { stopwatch in - let index = text.stopwatches.firstIndex(where: { $0 === stopwatch }) ?? 0 - StopwatchWidgetView( - name: String(localized: "Stopwatch \(index + 1)"), - stopwatch: stopwatch, - index: index, - textEffects: textEffects, - indented: true - ) - } - ForEach(text.checkboxes) { checkbox in - let index = text.checkboxes.firstIndex(where: { $0 === checkbox }) ?? 0 - CheckboxWidgetView( - name: textFormat.getCheckboxText(index: index), - checkbox: checkbox, - index: index, - textEffects: textEffects, - indented: true - ) - } - ForEach(text.ratings) { rating in - let index = text.ratings.firstIndex(where: { $0 === rating }) ?? 0 - RatingWidgetView( - name: String(localized: "Rating \(index + 1)"), - rating: rating, - index: index, - textEffects: textEffects, - indented: true - ) - } - ForEach(text.lapTimes) { lapTimes in - let index = text.lapTimes.firstIndex(where: { $0 === lapTimes }) ?? 
0 - LapTimesWidgetView( - name: String(localized: "Lap times \(index + 1)"), - lapTimes: lapTimes, - index: index, - textEffects: textEffects, - indented: true - ) - } - } - } -} - -struct TextWidgetSuggestionsView: View { - @Binding var text: String - - var body: some View { - NavigationLink { - TextWidgetSuggestionsInnerView(text: $text) - } label: { - Text("Suggestions") - } - } -} - struct WidgetTextSettingsView: View { @EnvironmentObject var model: Model let widget: SettingsWidget diff --git a/Moblin/View/Settings/Scenes/Widgets/Widget/WheelOfLuck/WidgetWheelOfLuckSettingsView.swift b/Moblin/View/Settings/Scenes/Widgets/Widget/WheelOfLuck/WidgetWheelOfLuckSettingsView.swift index 2d7ddc113..90709a279 100644 --- a/Moblin/View/Settings/Scenes/Widgets/Widget/WheelOfLuck/WidgetWheelOfLuckSettingsView.swift +++ b/Moblin/View/Settings/Scenes/Widgets/Widget/WheelOfLuck/WidgetWheelOfLuckSettingsView.swift @@ -2,38 +2,6 @@ import SwiftUI let wheelOfLuckOptionWeights = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 20, 40, 60, 80, 100] -private struct WheelOfLuckWidgetView: View { - let model: Model - @ObservedObject var widget: SettingsWidget - let effect: WheelOfLuckEffect - let indented: Bool - - var body: some View { - HStack(spacing: 20) { - if indented { - Text("") - Text("").frame(width: iconWidth) - } - Spacer() - Button { - widget.wheelOfLuck.shuffle() - model.getWheelOfLuckEffect(id: widget.id)?.setSettings(settings: widget.wheelOfLuck) - - } label: { - Image(systemName: "shuffle") - .font(.title) - } - Button { - effect.spin() - } label: { - Image(systemName: "play") - .font(.title) - } - } - .buttonStyle(.borderless) - } -} - private struct OptionView: View { let model: Model let widget: SettingsWidget @@ -77,17 +45,6 @@ private struct OptionView: View { } } -struct WidgetWheelOfLuckQuickButtonControlsView: View { - let model: Model - @ObservedObject var widget: SettingsWidget - - var body: some View { - if let effect = model.getWheelOfLuckEffect(id: widget.id) { - 
WheelOfLuckWidgetView(model: model, widget: widget, effect: effect, indented: true) - } - } -} - struct WheelOfLuckWidgetOptionsView: View { @Binding var value: String @FocusState private var editingText: Bool diff --git a/Moblin/View/Settings/Streams/Stream/StreamSettingsView.swift b/Moblin/View/Settings/Streams/Stream/StreamSettingsView.swift index ff729a036..683f95a94 100644 --- a/Moblin/View/Settings/Streams/Stream/StreamSettingsView.swift +++ b/Moblin/View/Settings/Streams/Stream/StreamSettingsView.swift @@ -270,6 +270,12 @@ struct StreamSettingsView: View { } label: { Text("RIST") } + case .whip: + NavigationLink { + StreamWhipSettingsView(stream: stream) + } label: { + Text("WHIP") + } } } } header: { diff --git a/Moblin/View/Settings/Streams/Stream/Url/StreamUrlSettingsView.swift b/Moblin/View/Settings/Streams/Stream/Url/StreamUrlSettingsView.swift index c2af6c43c..8d3f38c30 100644 --- a/Moblin/View/Settings/Streams/Stream/Url/StreamUrlSettingsView.swift +++ b/Moblin/View/Settings/Streams/Stream/Url/StreamUrlSettingsView.swift @@ -108,6 +108,22 @@ private struct SrtHelpView: View { } } +private struct WhipHelpView: View { + var body: some View { + Section { + VStack(alignment: .leading) { + Text("Template: https://my_domain/my_endpoint") + Text("Example: https://example.com/live/whip") + Text("Example: http://192.168.1.50:8080/live/whip") + Text("") + Text("WHIP uses HTTP/HTTPS endpoints. 
(whip:// is also accepted for compatibility.)") + } + } header: { + Text("WHIP") + } + } +} + private struct UrlSettingsView: View { @EnvironmentObject var model: Model @Environment(\.dismiss) var dismiss @@ -172,6 +188,7 @@ private struct UrlSettingsView: View { RtmpHelpView(stream: stream) if showSrtHelp { SrtHelpView() + WhipHelpView() } } .navigationTitle("Help") diff --git a/Moblin/View/Settings/Streams/Stream/Whip/StreamWhipSettingsView.swift b/Moblin/View/Settings/Streams/Stream/Whip/StreamWhipSettingsView.swift new file mode 100644 index 000000000..02a0543d1 --- /dev/null +++ b/Moblin/View/Settings/Streams/Stream/Whip/StreamWhipSettingsView.swift @@ -0,0 +1,60 @@ +import SwiftUI + +struct StreamWhipSettingsView: View { + @EnvironmentObject var model: Model + let stream: SettingsStream + + private func iceServersString() -> String { + stream.whip.iceServers.joined(separator: "\n") + } + + private func parseIceServers(_ value: String) -> [String] { + value + .split(whereSeparator: { $0 == "\n" || $0 == "," }) + .map { String($0).trim() } + .filter { !$0.isEmpty } + } + + var body: some View { + Form { + Section { + MultiLineTextFieldNavigationView( + title: String(localized: "ICE servers"), + value: iceServersString(), + onSubmit: { value in + stream.whip.iceServers = parseIceServers(value) + model.reloadStreamIfEnabled(stream: stream) + }, + footers: [ + String(localized: "Enter STUN/TURN URLs, one per line."), + String(localized: "Example: stun:stun.l.google.com:19302"), + String(localized: "Note: Custom ICE servers may be ignored depending on WHIP backend."), + ] + ) + + NavigationLink { + TextEditView( + title: String(localized: "Max retries"), + value: String(stream.whip.maxRetryCount), + keyboardType: .numberPad + ) { value in + guard let retry = Int(value), retry >= 0, retry <= 20 else { + return + } + stream.whip.maxRetryCount = retry + model.reloadStreamIfEnabled(stream: stream) + } + } label: { + TextItemView( + name: String(localized: "Max 
retries"), + value: String(stream.whip.maxRetryCount), + color: .gray + ) + } + .disabled(stream.enabled && model.isLive) + } + } + .navigationTitle("WHIP") + } +} + diff --git a/Moblin/View/Settings/Streams/Stream/Wizard/Custom/StreamWizardCustomSettingsView.swift b/Moblin/View/Settings/Streams/Stream/Wizard/Custom/StreamWizardCustomSettingsView.swift index e86c4567f..74a37674d 100644 --- a/Moblin/View/Settings/Streams/Stream/Wizard/Custom/StreamWizardCustomSettingsView.swift +++ b/Moblin/View/Settings/Streams/Stream/Wizard/Custom/StreamWizardCustomSettingsView.swift @@ -22,6 +22,11 @@ struct StreamWizardCustomSettingsView: View { } label: { Text("RIST") } + NavigationLink { + StreamWizardCustomWhipSettingsView(model: model, createStreamWizard: createStreamWizard) + } label: { + Text("WHIP") + } } header: { Text("Protocol") } diff --git a/Moblin/View/Settings/Streams/Stream/Wizard/Custom/StreamWizardCustomWhipSettingsView.swift b/Moblin/View/Settings/Streams/Stream/Wizard/Custom/StreamWizardCustomWhipSettingsView.swift new file mode 100644 index 000000000..b4497c9cf --- /dev/null +++ b/Moblin/View/Settings/Streams/Stream/Wizard/Custom/StreamWizardCustomWhipSettingsView.swift @@ -0,0 +1,58 @@ +import SwiftUI + +struct StreamWizardCustomWhipSettingsView: View { + let model: Model + @ObservedObject var createStreamWizard: CreateStreamWizard + @State var urlError = "" + + private func nextDisabled() -> Bool { + return createStreamWizard.customWhipUrl.isEmpty || !urlError.isEmpty + } + + private func updateUrlError() { + let url = cleanUrl(url: createStreamWizard.customWhipUrl) + if url.isEmpty { + urlError = "" + } else { + urlError = isValidUrl(url: url, allowedSchemes: ["http", "https", "whip", "whips"]) ?? 
"" + } + } + + var body: some View { + Form { + Section { + TextField("https://example.com/live/whip", text: $createStreamWizard.customWhipUrl) + .textInputAutocapitalization(.never) + .disableAutocorrection(true) + .onChange(of: createStreamWizard.customWhipUrl) { _ in + updateUrlError() + } + } header: { + Text("Url") + } footer: { + FormFieldError(error: urlError) + } + + Section { + NavigationLink { + StreamWizardGeneralSettingsView(model: model, createStreamWizard: createStreamWizard) + } label: { + WizardNextButtonView() + } + .disabled(nextDisabled()) + } + } + .onAppear { + createStreamWizard.customProtocol = .whip + createStreamWizard.name = makeUniqueName( + name: String(localized: "Custom WHIP"), + existingNames: model.database.streams + ) + } + .navigationTitle("WHIP") + .toolbar { + CreateStreamWizardToolbar(createStreamWizard: createStreamWizard) + } + } +} + diff --git a/Moblin/View/Settings/Streams/Stream/YouTube/StreamYouTubeSettingsView.swift b/Moblin/View/Settings/Streams/Stream/YouTube/StreamYouTubeSettingsView.swift index 432c667ce..b84f4b38a 100644 --- a/Moblin/View/Settings/Streams/Stream/YouTube/StreamYouTubeSettingsView.swift +++ b/Moblin/View/Settings/Streams/Stream/YouTube/StreamYouTubeSettingsView.swift @@ -13,10 +13,7 @@ private struct StreamDescriptionView: View { let startTime: Date private func details() -> String { - var details = [stream.status.visibility()?.toString() ?? 
String(localized: "Unknown")] - if stream.contentDetails.enableAutoStart { - details.append(String(localized: "Auto-start")) - } + var details = [stream.status.privacyStatus] if stream.contentDetails.enableAutoStop { details.append(String(localized: "Auto-stop")) } diff --git a/Moblin/View/Settings/WhepClient/WhepClientSettingsView.swift b/Moblin/View/Settings/WhepClient/WhepClientSettingsView.swift new file mode 100644 index 000000000..9d65a811d --- /dev/null +++ b/Moblin/View/Settings/WhepClient/WhepClientSettingsView.swift @@ -0,0 +1,50 @@ +import SwiftUI + +struct WhepClientSettingsView: View { + @EnvironmentObject var model: Model + @ObservedObject var whepClient: SettingsWhepClient + @State var numberOfEnabledStreams: Int = 0 + + private func status() -> String { + return String(numberOfEnabledStreams) + } + + var body: some View { + NavigationLink { + Form { + Section { + List { + ForEach(whepClient.streams) { stream in + WhepClientStreamSettingsView(whepClient: whepClient, stream: stream) + } + .onDelete { indexes in + whepClient.streams.remove(atOffsets: indexes) + model.reloadWhepClient() + } + } + CreateButtonView { + let stream = SettingsWhepClientStream() + stream.name = makeUniqueName(name: SettingsWhepClientStream.baseName, + existingNames: whepClient.streams) + whepClient.streams.append(stream) + } + } header: { + Text("Streams") + } footer: { + SwipeLeftToDeleteHelpView(kind: String(localized: "a stream")) + } + } + .navigationTitle("WHEP client") + } label: { + HStack { + Text("WHEP client") + Spacer() + GrayTextView(text: status()) + } + } + .onAppear { + numberOfEnabledStreams = whepClient.streams.filter { $0.enabled }.count + } + } +} + diff --git a/Moblin/View/Settings/WhepClient/WhepClientStreamSettingsView.swift b/Moblin/View/Settings/WhepClient/WhepClientStreamSettingsView.swift new file mode 100644 index 000000000..9b1133892 --- /dev/null +++ b/Moblin/View/Settings/WhepClient/WhepClientStreamSettingsView.swift @@ -0,0 +1,73 @@ +import 
SwiftUI + +struct WhepClientStreamSettingsView: View { + @EnvironmentObject var model: Model + @ObservedObject var whepClient: SettingsWhepClient + @ObservedObject var stream: SettingsWhepClientStream + + var body: some View { + NavigationLink { + Form { + Section { + NameEditView(name: $stream.name, existingNames: whepClient.streams) + } + Section { + TextEditNavigationView( + title: String(localized: "URL"), + value: stream.url, + onSubmit: { + stream.url = $0 + model.reloadWhepClient() + }, + footers: [ + "https://example.com/whep/myStream", + "http://192.168.1.10:8080/whep/myStream", + ], + placeholder: "https://example.com/whep/myStream" + ) + } + Section { + TextEditNavigationView( + title: String(localized: "Latency"), + value: String(stream.latency), + onChange: { + guard let latency = Int32($0) else { + return String(localized: "Not a number") + } + guard latency >= 250 else { + return String(localized: "Too small") + } + guard latency <= 10000 else { + return String(localized: "Too big") + } + return nil + }, + onSubmit: { + guard let latency = Int32($0) else { + return + } + stream.latency = latency + model.reloadWhepClient() + }, + footers: [String(localized: "250 or more milliseconds. 
2000 ms by default.")], + keyboardType: .numbersAndPunctuation, + valueFormat: { "\($0) ms" } + ) + } footer: { + Text("The higher, the lower risk of stuttering.") + } + } + .navigationTitle("Stream") + } label: { + Toggle(isOn: $stream.enabled) { + HStack { + Text(stream.name) + } + } + .onChange(of: stream.enabled) { _ in + model.reloadWhepClient() + } + } + } +} + diff --git a/Moblin/View/Settings/WhipServer/WhipServerSettingsView.swift b/Moblin/View/Settings/WhipServer/WhipServerSettingsView.swift new file mode 100644 index 000000000..51f3391e1 --- /dev/null +++ b/Moblin/View/Settings/WhipServer/WhipServerSettingsView.swift @@ -0,0 +1,111 @@ +import SwiftUI + +struct WhipServerSettingsView: View { + @EnvironmentObject var model: Model + @ObservedObject var whipServer: SettingsWhipServer + + private func submitPort(value: String) { + guard let port = UInt16(value) else { + return + } + whipServer.port = port + model.reloadWhipServer() + } + + private func status() -> String { + if whipServer.enabled { + return String(whipServer.streams.count) + } else { + return "0" + } + } + + var body: some View { + NavigationLink { + Form { + Section { + Text(""" + The WHIP server allows Moblin to receive video streams over the network using WebRTC (WHIP). 
+ """) + } + Section { + Toggle("Enabled", isOn: $whipServer.enabled) + .onChange(of: whipServer.enabled) { _ in + model.reloadWhipServer() + } + } + if whipServer.enabled { + Section { + HStack { + Image(systemName: "info.circle.fill") + .foregroundStyle(.blue) + Text("Disable the WHIP server to change its settings.") + } + } + } + Section { + TextEditNavigationView( + title: String(localized: "Port"), + value: String(whipServer.port), + onChange: isValidPort, + onSubmit: submitPort, + keyboardType: .numbersAndPunctuation + ) + .disabled(whipServer.enabled) + } footer: { + Text("The TCP port the WHIP server listens for publishers on.") + } + Section { + List { + let list = ForEach(whipServer.streams) { stream in + WhipServerStreamSettingsView( + status: model.statusOther, + whipServer: whipServer, + stream: stream + ) + } + if !whipServer.enabled { + list.onDelete { indexes in + whipServer.streams.remove(atOffsets: indexes) + model.reloadWhipServer() + model.updateMicsListAsync() + } + } else { + list + } + } + CreateButtonView { + let stream = SettingsWhipServerStream() + stream.name = makeUniqueName(name: SettingsWhipServerStream.baseName, + existingNames: whipServer.streams) + while true { + stream.streamKey = randomHumanString() + if model.getWhipStream(streamKey: stream.streamKey) == nil { + break + } + } + whipServer.streams.append(stream) + model.updateMicsListAsync() + } + .disabled(model.whipServerEnabled()) + } header: { + Text("Streams") + } footer: { + VStack(alignment: .leading) { + Text("Each stream can receive video from one WHIP publisher on the local network.") + Text("") + SwipeLeftToDeleteHelpView(kind: String(localized: "a stream")) + } + } + } + .navigationTitle("WHIP server") + } label: { + HStack { + Text("WHIP server") + Spacer() + GrayTextView(text: status()) + } + } + } +} + diff --git a/Moblin/View/Settings/WhipServer/WhipServerStreamSettingsView.swift b/Moblin/View/Settings/WhipServer/WhipServerStreamSettingsView.swift new file mode 
100644 index 000000000..e92fc9c74 --- /dev/null +++ b/Moblin/View/Settings/WhipServer/WhipServerStreamSettingsView.swift @@ -0,0 +1,124 @@ +import Network +import SwiftUI + +private struct UrlsView: View { + @ObservedObject var status: StatusOther + let port: UInt16 + let streamKey: String + + private func formatUrl(ip: String) -> String { + return "http://\(ip):\(port)/whip/\(streamKey)" + } + + var body: some View { + NavigationLink { + Form { + UrlsIpv4View(status: status, formatUrl: formatUrl) + UrlsIpv6View(status: status, formatUrl: formatUrl) + } + .navigationTitle("URLs") + } label: { + Text("URLs") + } + } +} + +struct WhipServerStreamSettingsView: View { + @EnvironmentObject var model: Model + @ObservedObject var status: StatusOther + @ObservedObject var whipServer: SettingsWhipServer + @ObservedObject var stream: SettingsWhipServerStream + + private func changeStreamKey(value: String) -> String? { + if model.getWhipStream(streamKey: value.trim()) == nil { + return nil + } + return String(localized: "Already in use") + } + + private func submitStreamKey(value: String) { + let streamKey = value.trim() + if model.getWhipStream(streamKey: streamKey) != nil { + return + } + stream.streamKey = streamKey + } + + private func changeLatency(value: String) -> String? 
{ + guard let latency = Int32(value) else { + return String(localized: "Not a number") + } + guard latency >= 250 else { + return String(localized: "Too small") + } + guard latency <= 10000 else { + return String(localized: "Too big") + } + return nil + } + + private func submitLatency(value: String) { + guard let latency = Int32(value) else { + return + } + stream.latency = latency + } + + var body: some View { + NavigationLink { + Form { + Section { + NameEditView(name: $stream.name, existingNames: whipServer.streams) + .disabled(model.whipServerEnabled()) + TextEditNavigationView( + title: String(localized: "Stream key"), + value: stream.streamKey, + onChange: changeStreamKey, + onSubmit: submitStreamKey + ) + .disabled(model.whipServerEnabled()) + } footer: { + Text("The stream name is shown in the list of cameras in scene settings.") + } + Section { + TextEditNavigationView( + title: String(localized: "Latency"), + value: String(stream.latency), + onChange: changeLatency, + onSubmit: submitLatency, + footers: [String(localized: "250 or more milliseconds. 2000 ms by default.")], + keyboardType: .numbersAndPunctuation, + valueFormat: { "\($0) ms" } + ) + .disabled(model.whipServerEnabled()) + } footer: { + Text("The higher, the lower risk of stuttering.") + } + Section { + UrlsView(status: status, port: whipServer.port, streamKey: stream.streamKey) + } header: { + Text("Publish URLs") + } footer: { + VStack(alignment: .leading) { + Text(""" + Enter one of the URLs into the WHIP publisher device to send video \ + to this stream. Usually enter the WiFi or Personal Hotspot URL. 
+ """) + } + } + } + .navigationTitle("Stream") + } label: { + HStack { + if model.isWhipStreamConnected(streamKey: stream.streamKey) { + Image(systemName: "cable.connector") + } else { + Image(systemName: "cable.connector.slash") + } + Text(stream.name) + Spacer() + } + } + } +} + diff --git a/Moblin/View/Stream/Overlay/StreamOverlayLeftView.swift b/Moblin/View/Stream/Overlay/StreamOverlayLeftView.swift index 3c7ca9f11..acaeeba6c 100644 --- a/Moblin/View/Stream/Overlay/StreamOverlayLeftView.swift +++ b/Moblin/View/Stream/Overlay/StreamOverlayLeftView.swift @@ -73,45 +73,6 @@ private struct ViewersView: View { } } -private struct ChatStatusView: View { - @ObservedObject var status: StatusTopLeft - let foregroundColor: Color - - var body: some View { - HStack(spacing: 1) { - Image(systemName: "message") - .frame(width: 17, height: 17) - .padding([.leading, .trailing], 2) - .foregroundStyle(foregroundColor) - .background(backgroundColor) - .cornerRadius(5) - HStack(spacing: 2) { - if status.chatPlatformStatuses.isEmpty { - Text(status.statusChatText) - } else { - ForEach(status.chatPlatformStatuses, id: \.platform) { - ViewersLogoView(platform: $0.platform) - if $0.connected { - Text("Connected") - .foregroundStyle(.white) - } else { - Text("Disconnected") - .foregroundStyle(.red) - } - } - } - } - .padding([.leading, .trailing], 2) - .background(backgroundColor) - .cornerRadius(5) - } - .font(smallFont) - .padding(20) - .contentShape(Rectangle()) - .padding(-20) - } -} - private struct StreamStatusView: View { @ObservedObject var status: StatusTopLeft let textPlacement: StreamOverlayIconAndTextPlacement @@ -226,16 +187,12 @@ private struct StatusesView: View { ) } if model.isShowingStatusChat() { - if textPlacement == .hide { - StreamOverlayIconAndTextView( - icon: "message", - text: status.statusChatText, - textPlacement: textPlacement, - color: chatColor() - ) - } else { - ChatStatusView(status: status, foregroundColor: chatColor()) - } + 
StreamOverlayIconAndTextView( + icon: "message", + text: status.statusChatText, + textPlacement: textPlacement, + color: chatColor() + ) } if model.isShowingStatusViewers() { if textPlacement == .hide { diff --git a/Vendor/HaishinKit.swift/.github/ISSUE_TEMPLATE/bug_report.yml b/Vendor/HaishinKit.swift/.github/ISSUE_TEMPLATE/bug_report.yml new file mode 100644 index 000000000..e5df7ee13 --- /dev/null +++ b/Vendor/HaishinKit.swift/.github/ISSUE_TEMPLATE/bug_report.yml @@ -0,0 +1,70 @@ +name: Bug Report +description: | + Before posting, confirm whether the issue can be reproduced in the Example project as well. +body: + - type: markdown + attributes: + value: | + * Please review the [support guidelines](https://github.com/HaishinKit/.github/blob/main/SUPPORT.md) first. If the report lacks sufficient details, it will be closed. + * Development for version 1.x.x has ended. Issues are no longer accepted. + - type: textarea + id: observed-behavior + attributes: + label: Observed behavior + description: Explain what happened. + validations: + required: true + - type: textarea + id: expected-behavior + attributes: + label: Expected behavior + description: Explain what you expect. + validations: + required: true + - type: textarea + id: to-reproduce + attributes: + label: To Reproduce + description: Steps to reproduce the behavior + placeholder: | + 1. Go to '...' + 2. Click on '....' + 3. Scroll down to '....' + 4. See error + validations: + required: true + - type: textarea + id: version + attributes: + label: Version + description: What version of our software are you running? + validations: + required: true + - type: textarea + id: smartphone-info + attributes: + label: Smartphone info. + description: please complete the following information + placeholder: | + - Device: [e.g. iPhone 15 Pro] + - OS: [e.g. iOS 18.1] + - type: textarea + id: addditional-context + attributes: + label: Additional context + description: Add any other context about the problem here. 
If you encounter issues while using HaishinKit for streaming or viewing, please provide the server name as well as the name of the software you are using for streaming or viewing. + placeholder: | + - Server: [e.g. nginx + rtmp module, YouTube] + - Ingest software: [e.g. OBS RTMP H264(main) AAC] + - Playback software: [e.g. iOS AVAudioPlayer(via HLS)] + - type: textarea + id: screenshot + attributes: + label: Screenshots + description: If applicable, add screenshots to help explain your problem. + - type: textarea + id: logs + attributes: + label: Relevant log output + description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks. + render: shell diff --git a/Vendor/HaishinKit.swift/.github/ISSUE_TEMPLATE/config.yml b/Vendor/HaishinKit.swift/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 000000000..768b98192 --- /dev/null +++ b/Vendor/HaishinKit.swift/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,8 @@ +blank_issues_enabled: false +contact_links: + - name: Question + url: https://github.com/HaishinKit/HaishinKit.swift/discussions + about: Please ask and answer questions here with *Q&A* + - name: Feature request + url: https://github.com/HaishinKit/HaishinKit.swift/discussions + about: You'd like to discuss a feature request here with *Idea*. diff --git a/Vendor/HaishinKit.swift/.github/dependabot.yml b/Vendor/HaishinKit.swift/.github/dependabot.yml new file mode 100644 index 000000000..cad58f637 --- /dev/null +++ b/Vendor/HaishinKit.swift/.github/dependabot.yml @@ -0,0 +1,13 @@ +# To get started with Dependabot version updates, you'll need to specify which +# package ecosystems to update and where the package manifests are located. 
+# Please see the documentation for all configuration options: +# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates +version: 2 +updates: + - package-ecosystem: "bundler" # See documentation for possible values + directory: "/" # Location of package manifests + schedule: + interval: "weekly" + day: "sunday" + time: "09:00" + timezone: "Asia/Tokyo" diff --git a/Vendor/HaishinKit.swift/.github/workflows/ai-moderator.yml b/Vendor/HaishinKit.swift/.github/workflows/ai-moderator.yml new file mode 100644 index 000000000..4c6826361 --- /dev/null +++ b/Vendor/HaishinKit.swift/.github/workflows/ai-moderator.yml @@ -0,0 +1,30 @@ +name: AI Moderator +on: + issues: + types: [opened] + issue_comment: + types: [created] + pull_request_review_comment: + types: [created] + +jobs: + spam-detection: + runs-on: ubuntu-latest + permissions: + issues: write + pull-requests: write + models: read + contents: read + steps: + - uses: actions/checkout@v4 + - uses: github/ai-moderator@v1 + with: + token: ${{ secrets.GITHUB_TOKEN }} + spam-label: 'spam' + ai-label: 'ai-generated' + minimize-detected-comments: true + # Built-in prompt configuration (all enabled by default) + enable-spam-detection: true + enable-link-spam-detection: true + # enable-ai-detection: true + # custom-prompt-path: '.github/prompts/my-custom.prompt.yml' # Optional diff --git a/Vendor/HaishinKit.swift/.github/workflows/lock-thread.yml b/Vendor/HaishinKit.swift/.github/workflows/lock-thread.yml new file mode 100644 index 000000000..acd432f98 --- /dev/null +++ b/Vendor/HaishinKit.swift/.github/workflows/lock-thread.yml @@ -0,0 +1,17 @@ +name: 'Lock Threads' +on: + schedule: + - cron: '0 1 * * 1,4' + workflow_dispatch: +permissions: + issues: write +concurrency: + group: lock +jobs: + action: + runs-on: ubuntu-latest + steps: + - uses: dessant/lock-threads@v4 + with: + issue-inactive-days: '60' + process-only: 'issues' diff --git 
a/Vendor/HaishinKit.swift/.github/workflows/release.yml b/Vendor/HaishinKit.swift/.github/workflows/release.yml new file mode 100644 index 000000000..190200cbf --- /dev/null +++ b/Vendor/HaishinKit.swift/.github/workflows/release.yml @@ -0,0 +1,24 @@ +name: Release +on: + push: + tags: + - '*.*.*' +jobs: + build: + runs-on: macos-15 + steps: + - uses: actions/checkout@v4 + - name: Select Xcode version + run: sudo xcode-select -s '/Applications/Xcode_26.0.app' + - name: Setup SSH + run: | + mkdir -p ~/.ssh + echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/id_rsa + chmod 600 ~/.ssh/id_rsa + ssh-keyscan github.com >> ~/.ssh/known_hosts + - name: bundle exec fastlane document + env: + DANGER_GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + bundle install + bundle exec fastlane document version:${{ github.ref_name }} diff --git a/Vendor/HaishinKit.swift/.github/workflows/review.yml b/Vendor/HaishinKit.swift/.github/workflows/review.yml new file mode 100644 index 000000000..d1bda2d70 --- /dev/null +++ b/Vendor/HaishinKit.swift/.github/workflows/review.yml @@ -0,0 +1,15 @@ +name: Review +on: [pull_request] +jobs: + build: + runs-on: macos-26 + steps: + - uses: actions/checkout@v4 + - name: Select Xcode version + run: sudo xcode-select -s '/Applications/Xcode_26.2.0.app' + - name: bundle install + env: + DANGER_GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + bundle install + bundle exec fastlane review diff --git a/Vendor/HaishinKit.swift/.gitignore b/Vendor/HaishinKit.swift/.gitignore new file mode 100644 index 000000000..aa0fdf193 --- /dev/null +++ b/Vendor/HaishinKit.swift/.gitignore @@ -0,0 +1,33 @@ +*.xcodeproj/xcuserdata/* +*.xcworkspace/xcuserdata/* +build +.build +*.xccheckout +*.moved-aside +*.xcuserstate +*.xcscmblueprint +.DS_Store +Pods + +soe/bat/* +soe/obj/* +soe/bin/* +soe/air/* +soe/AIR_readme.txt + +Carthage/* + +.idea/ +.vscode/ +.swiftpm/ + +*.xcsettings +docs/undocumented.json +fastlane/report.xml +libdatachannel.xcframework 
+docs/docsets/HaishinKit.docset/Contents/Resources/Documents/undocumented.json +HaishinKit.json +SRTHaishinKit.json +SRTHaishinKit/Vendor/SRT/OpenSSL/ +SRTHaishinKit/Vendor/SRT/srt/ +SRTHaishinKit/Vendor/SRT/Includes/ diff --git a/Vendor/HaishinKit.swift/.ruby-version b/Vendor/HaishinKit.swift/.ruby-version new file mode 100644 index 000000000..f9892605c --- /dev/null +++ b/Vendor/HaishinKit.swift/.ruby-version @@ -0,0 +1 @@ +3.4.4 diff --git a/Vendor/HaishinKit.swift/.swiftlint.yml b/Vendor/HaishinKit.swift/.swiftlint.yml new file mode 100644 index 000000000..601e04651 --- /dev/null +++ b/Vendor/HaishinKit.swift/.swiftlint.yml @@ -0,0 +1,89 @@ +disabled_rules: + - identifier_name + - force_cast + - line_length + - type_body_length + - function_body_length + - file_length + - cyclomatic_complexity + - compiler_protocol_init +analyzer_rules: + - unused_declaration + - unused_import +excluded: + - Carthage + - Pods + - Vendor + - .build +opt_in_rules: +# - anyobject_protocol + - array_init +# - attributes +# - closure_body_length + - closure_end_indentation + - closure_spacing + - collection_alignment + - conditional_returns_on_newline + - contains_over_first_not_nil + - convenience_type +# - discouraged_object_literal + - discouraged_optional_boolean +# - discouraged_optional_collection + - empty_count + - empty_string +# - explicit_acl +# - explicit_enum_raw_value +# - explicit_init +# - explicit_self +# - explicit_type_interface +# - fallthrough +# - file_header +# - file_name + - first_where +# - force_unwrapping + - function_default_parameter_at_end + - identical_operands +# - implicit_return + - joined_default_parameter + - last_where + - legacy_random + - let_var_whitespace + - literal_expression_end_indentation +# - lower_acl_than_parent +# - missing_docs + - modifier_order + - multiline_arguments + - multiline_function_chains + - multiline_literal_brackets + - multiline_parameters +# - no_extension_access_modifier +# - no_grouping_extension +# - 
number_separator + - operator_usage_whitespace + - overridden_super_call + - override_in_extension + - pattern_matching_keywords +# - prefixed_toplevel_constant + - private_outlet + - private_over_fileprivate +# - prohibited_interface_builder + - quick_discouraged_call + - redundant_nil_coalescing + - redundant_type_annotation + - required_enum_case + - shorthand_operator + - sorted_first_last + - sorted_imports + - static_operator +# - strict_fileprivate + - switch_case_on_newline + - toggle_bool +# - trailing_closure + - unavailable_function + - unneeded_parentheses_in_closure_argument + - untyped_error_in_catch + - vertical_parameter_alignment_on_call +# - vertical_whitespace_between_cases + - vertical_whitespace_closing_braces + - vertical_whitespace_opening_braces +# - yoda_condition diff --git a/Vendor/HaishinKit.swift/Dangerfile b/Vendor/HaishinKit.swift/Dangerfile new file mode 100644 index 000000000..e78afff59 --- /dev/null +++ b/Vendor/HaishinKit.swift/Dangerfile @@ -0,0 +1,3 @@ +github.dismiss_out_of_range_messages + +privacymanifest.report diff --git a/Vendor/HaishinKit.swift/DebugDescription/AVAudioFormat+DebugExtension.swift b/Vendor/HaishinKit.swift/DebugDescription/AVAudioFormat+DebugExtension.swift new file mode 100644 index 000000000..7eff82bfb --- /dev/null +++ b/Vendor/HaishinKit.swift/DebugDescription/AVAudioFormat+DebugExtension.swift @@ -0,0 +1,109 @@ +import AVFoundation + +#if DEBUG +extension AVAudioCommonFormat: CustomDebugStringConvertible { + public var debugDescription: String { + switch self { + case .pcmFormatFloat32: + return "float32" + case .pcmFormatFloat64: + return "float64" + case .pcmFormatInt16: + return "int16" + case .pcmFormatInt32: + return "int32" + case .otherFormat: + return "other" + @unknown default: + return "unknown" + } + } +} + +extension AudioFormatID: CustomDebugStringConvertible { + public var debugDescription: String { + switch self { + case kAudioFormatAC3: + return "kAudioFormatAC3" + case 
kAudioFormatAES3: + return "kAudioFormatAES3" + case kAudioFormatALaw: + return "kAudioFormatALaw" + case kAudioFormatAMR: + return "kAudioFormatAMR" + case kAudioFormatAMR_WB: + return "kAudioFormatAMR_WB" + case kAudioFormatAppleIMA4: + return "kAudioFormatAppleIMA4" + case kAudioFormatAppleLossless: + return "kAudioFormatAppleLossless" + case kAudioFormatAudible: + return "kAudioFormatAudible" + case kAudioFormatDVIIntelIMA: + return "kAudioFormatDVIIntelIMA" + case kAudioFormatEnhancedAC3: + return "kAudioFormatEnhancedAC3" + case kAudioFormatFLAC: + return "kAudioFormatFLAC" + case kAudioFormatLinearPCM: + return "kAudioFormatLinearPCM" + case kAudioFormatMACE3: + return "kAudioFormatMACE3" + case kAudioFormatMACE6: + return "kAudioFormatMACE6" + case kAudioFormatMIDIStream: + return "kAudioFormatMIDIStream" + case kAudioFormatMPEG4AAC: + return "kAudioFormatMPEG4AAC" + case kAudioFormatMPEG4AAC_ELD: + return "kAudioFormatMPEG4AAC_ELD" + case kAudioFormatMPEG4AAC_ELD_SBR: + return "kAudioFormatMPEG4AAC_ELD_SBR" + case kAudioFormatMPEG4AAC_ELD_V2: + return "kAudioFormatMPEG4AAC_ELD_V2" + case kAudioFormatMPEG4AAC_HE: + return "kAudioFormatMPEG4AAC_HE" + case kAudioFormatMPEG4AAC_HE_V2: + return "kAudioFormatMPEG4AAC_HE_V2" + case kAudioFormatMPEG4AAC_LD: + return "kAudioFormatMPEG4AAC_LD" + case kAudioFormatMPEG4AAC_Spatial: + return "kAudioFormatMPEG4AAC_Spatial" + case kAudioFormatMPEG4CELP: + return "kAudioFormatMPEG4CELP" + case kAudioFormatMPEG4HVXC: + return "kAudioFormatMPEG4HVXC" + case kAudioFormatMPEG4TwinVQ: + return "kAudioFormatMPEG4TwinVQ" + case kAudioFormatMPEGD_USAC: + return "kAudioFormatMPEGD_USAC" + case kAudioFormatMPEGLayer1: + return "kAudioFormatMPEGLayer1" + case kAudioFormatMPEGLayer2: + return "kAudioFormatMPEGLayer2" + case kAudioFormatMPEGLayer3: + return "kAudioFormatMPEGLayer3" + case kAudioFormatMicrosoftGSM: + return "kAudioFormatMicrosoftGSM" + case kAudioFormatOpus: + return "kAudioFormatOpus" + case 
kAudioFormatParameterValueStream: + return "kAudioFormatParameterValueStream" + case kAudioFormatQDesign: + return "kAudioFormatQDesign" + case kAudioFormatQDesign2: + return "kAudioFormatQDesign2" + case kAudioFormatQUALCOMM: + return "kAudioFormatQUALCOMM" + case kAudioFormatTimeCode: + return "kAudioFormatTimeCode" + case kAudioFormatULaw: + return "kAudioFormatULaw" + case kAudioFormatiLBC: + return "kAudioFormatiLBC" + default: + return "unknown" + } + } +} +#endif diff --git a/Vendor/HaishinKit.swift/DebugDescription/AudioNode+DebugExtension.swift b/Vendor/HaishinKit.swift/DebugDescription/AudioNode+DebugExtension.swift new file mode 100644 index 000000000..ad76297fc --- /dev/null +++ b/Vendor/HaishinKit.swift/DebugDescription/AudioNode+DebugExtension.swift @@ -0,0 +1,182 @@ +import AVFoundation +import Foundation + +extension AudioNode: CustomStringConvertible { + var description: String { + var description: [String] = [] + + for scope in BusScope.allCases { + guard let busCount = try? busCount(scope: scope) else { + description.append("failed to get \(scope.rawValue) bus count") + continue + } + guard busCount > 0 else { + continue + } + var busDescription: [String] = [] + for busIndex in 0...size + let parameterIds = UnsafeMutablePointer.allocate(capacity: numberOfParameters) + defer { parameterIds.deallocate() } + + if numberOfParameters > 0 { + status = AudioUnitGetProperty(audioUnit, + kAudioUnitProperty_ParameterList, + kAudioUnitScope_Global, + 0, + parameterIds, + ¶meterListSize) + guard status == noErr else { + throw Error.unableToRetrieveValue(status) + } + } + + var info = AudioUnitParameterInfo() + var infoSize = UInt32(MemoryLayout.size) + + for i in 0.. 
Set { + var result = Set() + allCases.forEach { flag in + if flag.flagValue & flagOptionSet == flag.flagValue { + result.insert(flag) + } + } + return result + } + + static func flagOptionSet(from flagSet: Set) -> AudioFormatFlags { + var optionSet: AudioFormatFlags = 0 + flagSet.forEach { flag in + optionSet |= flag.flagValue + } + return optionSet + } + + public var description: String { + rawValue + } + } + + struct ReadableFlagOptionSet: OptionSet, CustomStringConvertible { + public let rawValue: AudioFormatFlags + public let flags: Set + + public init(rawValue value: AudioFormatFlags) { + self.rawValue = value + flags = ReadableFormatFlag.flags(from: rawValue) + } + + public var description: String { + guard ReadableFormatFlag.flagOptionSet(from: flags) == rawValue else { + return "Unable to parse AudioFormatFlags" + } + let result = flags.sorted(by: { $0.rawValue < $1.rawValue }).map { $0.description }.joined(separator: " | ") + return "AudioFormatFlags(\(result))" + } + } + + var readableFormatID: String { + switch mFormatID { + // swiftlint:disable switch_case_on_newline + case kAudioFormatLinearPCM: return "LinearPCM" + case kAudioFormatAC3: return "AC3" + case kAudioFormat60958AC3: return "60958AC3" + case kAudioFormatAppleIMA4: return "AppleIMA4" + case kAudioFormatMPEG4AAC: return "MPEG4AAC" + case kAudioFormatMPEG4CELP: return "MPEG4CELP" + case kAudioFormatMPEG4HVXC: return "MPEG4HVXC" + case kAudioFormatMPEG4TwinVQ: return "MPEG4TwinVQ" + case kAudioFormatMACE3: return "MACE3" + case kAudioFormatMACE6: return "MACE6" + case kAudioFormatULaw: return "ULaw" + case kAudioFormatALaw: return "ALaw" + case kAudioFormatQDesign: return "QDesign" + case kAudioFormatQDesign2: return "QDesign2" + case kAudioFormatQUALCOMM: return "QUALCOMM" + case kAudioFormatMPEGLayer1: return "MPEGLayer1" + case kAudioFormatMPEGLayer2: return "MPEGLayer2" + case kAudioFormatMPEGLayer3: return "MPEGLayer3" + case kAudioFormatTimeCode: return "TimeCode" + case 
kAudioFormatMIDIStream: return "MIDIStream" + case kAudioFormatParameterValueStream: return "ParameterValueStream" + case kAudioFormatAppleLossless: return "AppleLossless" + case kAudioFormatMPEG4AAC_HE: return "MPEG4AAC_HE" + case kAudioFormatMPEG4AAC_LD: return "MPEG4AAC_LD" + case kAudioFormatMPEG4AAC_ELD: return "MPEG4AAC_ELD" + case kAudioFormatMPEG4AAC_ELD_SBR: return "MPEG4AAC_ELD_SBR" + case kAudioFormatMPEG4AAC_ELD_V2: return "MPEG4AAC_ELD_V2" + case kAudioFormatMPEG4AAC_HE_V2: return "MPEG4AAC_HE_V2" + case kAudioFormatMPEG4AAC_Spatial: return "MPEG4AAC_Spatial" + case kAudioFormatAMR: return "AMR" + case kAudioFormatAMR_WB: return "AMR_WB" + case kAudioFormatAudible: return "Audible" + case kAudioFormatiLBC: return "iLBC" + case kAudioFormatDVIIntelIMA: return "DVIIntelIMA" + case kAudioFormatMicrosoftGSM: return "MicrosoftGSM" + case kAudioFormatAES3: return "AES3" + case kAudioFormatEnhancedAC3: return "EnhancedAC3" + default: return "unknown_(\(Int(mFormatID)))" + // swiftlint:enable switch_case_on_newline + } + } + + var readableFlags: ReadableFlagOptionSet { + ReadableFlagOptionSet(rawValue: mFormatFlags) + } +} + +extension AudioStreamBasicDescription: CustomDebugStringConvertible { + // MARK: CustomDebugStringConvertible + public var debugDescription: String { + "AudioStreamBasicDescription(mSampleRate: \(mSampleRate), mFormatID: \(mFormatID) \(readableFormatID), " + + "mFormatFlags: \(mFormatFlags) \(readableFlags), mBytesPerPacket: \(mBytesPerPacket), " + + "mFramesPerPacket: \(mFramesPerPacket), mBytesPerFrame: \(mBytesPerFrame), " + + "mChannelsPerFrame: \(mChannelsPerFrame), mBitsPerChannel: \(mBitsPerChannel), mReserved: \(mReserved)" + } +} + +#endif diff --git a/Vendor/HaishinKit.swift/Examples/.swiftlint.yml b/Vendor/HaishinKit.swift/Examples/.swiftlint.yml new file mode 100644 index 000000000..601e04651 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/.swiftlint.yml @@ -0,0 +1,89 @@ +disabled_rules: + - identifier_name + - force_cast 
+ - line_length + - type_body_length + - function_body_length + - file_length + - cyclomatic_complexity + - compiler_protocol_init +analyzer_rules: + - unused_declaration + - unused_import +excluded: + - Carthage + - Pods + - Vendor + - .build +opt_in_rules: +# - anyobject_protocol + - array_init +# - attributes +# - closure_body_length + - closure_end_indentation + - closure_spacing + - collection_alignment + - conditional_returns_on_newline + - contains_over_first_not_nil + - convenience_type +# - discouraged_object_literal + - discouraged_optional_boolean +# - discouraged_optional_collection + - empty_count + - empty_string +# - explicit_acl +# - explicit_enum_raw_value +# - explicit_init +# - explicit_self +# - explicit_type_interface +# - fallthrough +# - file_header +# - file_name + - first_where +# - force_unwrapping + - function_default_parameter_at_end + - identical_operands +# - implicit_return + - joined_default_parameter + - last_where + - legacy_random + - let_var_whitespace + - literal_expression_end_indentation +# - lower_acl_than_parent +# - missing_docs + - modifier_order + - multiline_arguments + - multiline_function_chains + - multiline_literal_brackets + - multiline_parameters +# - no_extension_access_modifier +# - no_grouping_extension +# - number_separator + - operator_usage_whitespace + - overridden_super_call + - override_in_extension + - pattern_matching_keywords +# - prefixed_toplevel_constant + - private_outlet + - private_over_fileprivate +# - prohibited_interface_builder + - quick_discouraged_call + - redundant_nil_coalescing + - redundant_type_annotation + - required_enum_case + - shorthand_operator + - sorted_first_last + - sorted_imports + - static_operator +# - strict_fileprivate + - switch_case_on_newline + - toggle_bool +# - trailing_closure + - unavailable_function + - unneeded_parentheses_in_closure_argument + - untyped_error_in_catch + - vertical_parameter_alignment_on_call +# - vertical_whitespace_between_cases + - 
vertical_whitespace_closing_braces + - vertical_whitespace_opening_braces +# - yoda_condition diff --git a/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/project.pbxproj b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/project.pbxproj new file mode 100644 index 000000000..091cdc0c7 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/project.pbxproj @@ -0,0 +1,1287 @@ +// !$*UTF8*$! +{ + archiveVersion = 1; + classes = { + }; + objectVersion = 70; + objects = { + +/* Begin PBXBuildFile section */ + BC05F96E2E3D248A002BCC92 /* Preference.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC9129382D12EA820077EFF3 /* Preference.swift */; }; + BC4B7DCD2E86D06700973BD7 /* ReplayKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = BCD5C7F92E3F189700A69F16 /* ReplayKit.framework */; }; + BC4B7DD42E86D06700973BD7 /* Screencast.appex in Embed Foundation Extensions */ = {isa = PBXBuildFile; fileRef = BC4B7DCC2E86D06700973BD7 /* Screencast.appex */; settings = {ATTRIBUTES = (RemoveHeadersOnCopy, ); }; }; + BC4B7DE02E86D13C00973BD7 /* HaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BC4B7DDF2E86D13C00973BD7 /* HaishinKit */; }; + BC4B7DE22E86D13C00973BD7 /* MoQTHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BC4B7DE12E86D13C00973BD7 /* MoQTHaishinKit */; }; + BC4B7DE42E86D13C00973BD7 /* RTCHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BC4B7DE32E86D13C00973BD7 /* RTCHaishinKit */; }; + BC4B7DE62E86D13C00973BD7 /* RTMPHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BC4B7DE52E86D13C00973BD7 /* RTMPHaishinKit */; }; + BC4B7DE82E86D13C00973BD7 /* SRTHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BC4B7DE72E86D13C00973BD7 /* SRTHaishinKit */; }; + BC4B7DE92E86D14A00973BD7 /* Preference.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC9129382D12EA820077EFF3 /* Preference.swift */; }; + BC6842B62E3D1294008FBFEE /* HaishinKit in Frameworks */ = {isa = 
PBXBuildFile; productRef = BC6842B52E3D1294008FBFEE /* HaishinKit */; }; + BC6842B82E3D1294008FBFEE /* MoQTHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BC6842B72E3D1294008FBFEE /* MoQTHaishinKit */; }; + BC6842BA2E3D1294008FBFEE /* RTMPHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BC6842B92E3D1294008FBFEE /* RTMPHaishinKit */; }; + BC6842BC2E3D1294008FBFEE /* SRTHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BC6842BB2E3D1294008FBFEE /* SRTHaishinKit */; }; + BC82125D2EB8FB1500419D06 /* HaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BC82125C2EB8FB1500419D06 /* HaishinKit */; }; + BC82125F2EB8FB1C00419D06 /* RTCHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BC82125E2EB8FB1C00419D06 /* RTCHaishinKit */; }; + BC8212612EB8FB2000419D06 /* RTMPHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BC8212602EB8FB2000419D06 /* RTMPHaishinKit */; }; + BC8212632EB8FB2400419D06 /* SRTHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BC8212622EB8FB2400419D06 /* SRTHaishinKit */; }; + BC8212652EB8FB2D00419D06 /* MoQTHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BC8212642EB8FB2D00419D06 /* MoQTHaishinKit */; }; + BC8212662EB8FB5200419D06 /* Preference.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC9129382D12EA820077EFF3 /* Preference.swift */; }; + BC91298C2D12EB5E0077EFF3 /* Preference.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC9129382D12EA820077EFF3 /* Preference.swift */; }; + BC912A812D12F10C0077EFF3 /* libc++.1.tbd in Frameworks */ = {isa = PBXBuildFile; fileRef = BC912A802D12F0900077EFF3 /* libc++.1.tbd */; }; + BCD2478C2E54C04E00C64280 /* RTCHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BCD2478B2E54C04E00C64280 /* RTCHaishinKit */; }; + BCD917072D3A94BC00D30743 /* HaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BCD917062D3A94BC00D30743 /* HaishinKit */; }; + BCD917092D3A94BC00D30743 /* 
MoQTHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BCD917082D3A94BC00D30743 /* MoQTHaishinKit */; }; + BCD9170B2D3A94BC00D30743 /* SRTHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BCD9170A2D3A94BC00D30743 /* SRTHaishinKit */; }; + BCEC2BC02E104D9A00422F8F /* RTMPHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BCEC2BBF2E104D9A00422F8F /* RTMPHaishinKit */; }; + BCFE628F2E7710A100941209 /* Preference.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC9129382D12EA820077EFF3 /* Preference.swift */; }; + BCFE62912E7710D800941209 /* HaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BCFE62902E7710D800941209 /* HaishinKit */; }; + BCFE62A82E77179F00941209 /* RTCHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BCFE62A72E77179F00941209 /* RTCHaishinKit */; }; + BCFE62AA2E77179F00941209 /* RTMPHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BCFE62A92E77179F00941209 /* RTMPHaishinKit */; }; + BCFE62AC2E77179F00941209 /* SRTHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = BCFE62AB2E77179F00941209 /* SRTHaishinKit */; }; +/* End PBXBuildFile section */ + +/* Begin PBXContainerItemProxy section */ + BC4B7DD22E86D06700973BD7 /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 2945CBB41B4BE66000104112 /* Project object */; + proxyType = 1; + remoteGlobalIDString = BC4B7DCB2E86D06700973BD7; + remoteInfo = Screencast; + }; +/* End PBXContainerItemProxy section */ + +/* Begin PBXCopyFilesBuildPhase section */ + BC7A0E592B0894B9005FB2F7 /* Embed Frameworks */ = { + isa = PBXCopyFilesBuildPhase; + buildActionMask = 2147483647; + dstPath = ""; + dstSubfolderSpec = 10; + files = ( + ); + name = "Embed Frameworks"; + runOnlyForDeploymentPostprocessing = 0; + }; + BCD5C8062E3F189700A69F16 /* Embed Foundation Extensions */ = { + isa = PBXCopyFilesBuildPhase; + buildActionMask = 2147483647; + dstPath = ""; + dstSubfolderSpec = 13; + files = ( + 
BC4B7DD42E86D06700973BD7 /* Screencast.appex in Embed Foundation Extensions */, + ); + name = "Embed Foundation Extensions"; + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXCopyFilesBuildPhase section */ + +/* Begin PBXFileReference section */ + BC4B7DCC2E86D06700973BD7 /* Screencast.appex */ = {isa = PBXFileReference; explicitFileType = "wrapper.app-extension"; includeInIndex = 0; path = Screencast.appex; sourceTree = BUILT_PRODUCTS_DIR; }; + BC6841622E3CFBE0008FBFEE /* Example iOS.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "Example iOS.app"; sourceTree = BUILT_PRODUCTS_DIR; }; + BC7A0E3D2B088FA7005FB2F7 /* Example visionOS.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "Example visionOS.app"; sourceTree = BUILT_PRODUCTS_DIR; }; + BC82124C2EB8F8BE00419D06 /* Example tvOS.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "Example tvOS.app"; sourceTree = BUILT_PRODUCTS_DIR; }; + BC91287D2D12E7540077EFF3 /* Examples.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; path = Examples.xcodeproj; sourceTree = ""; }; + BC9128F82D12E9970077EFF3 /* Examples.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; path = Examples.xcodeproj; sourceTree = ""; }; + BC9129382D12EA820077EFF3 /* Preference.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Preference.swift; sourceTree = ""; }; + BC912A7F2D12F0830077EFF3 /* libc++.tbd */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.text-based-dylib-definition"; name = "libc++.tbd"; path = "Platforms/XROS.platform/Developer/SDKs/XROS2.1.sdk/usr/lib/libc++.tbd"; sourceTree = DEVELOPER_DIR; }; + BC912A802D12F0900077EFF3 /* libc++.1.tbd */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.text-based-dylib-definition"; name = "libc++.1.tbd"; path = 
"Platforms/XROS.platform/Developer/SDKs/XROS2.1.sdk/usr/lib/libc++.1.tbd"; sourceTree = DEVELOPER_DIR; }; + BCD5C7F92E3F189700A69F16 /* ReplayKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = ReplayKit.framework; path = System/Library/Frameworks/ReplayKit.framework; sourceTree = SDKROOT; }; + BCD916432D3A934400D30743 /* HaishinKit */ = {isa = PBXFileReference; lastKnownFileType = wrapper; name = HaishinKit; path = ../; sourceTree = SOURCE_ROOT; }; + BCFE627E2E770A9900941209 /* Example macOS.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "Example macOS.app"; sourceTree = BUILT_PRODUCTS_DIR; }; +/* End PBXFileReference section */ + +/* Begin PBXFileSystemSynchronizedBuildFileExceptionSet section */ + BC36B4F82E77201200AADEC1 /* PBXFileSystemSynchronizedBuildFileExceptionSet */ = { + isa = PBXFileSystemSynchronizedBuildFileExceptionSet; + membershipExceptions = ( + PreferenceView.swift, + PreferenceViewModel.swift, + VisualEffect.swift, + ); + target = BCFE627D2E770A9900941209 /* Example macOS */; + }; + BC4B7DDD2E86D06E00973BD7 /* PBXFileSystemSynchronizedBuildFileExceptionSet */ = { + isa = PBXFileSystemSynchronizedBuildFileExceptionSet; + membershipExceptions = ( + Info.plist, + Screencast/Info.plist, + Screencast/SampleHandler.swift, + ); + target = BC6841612E3CFBE0008FBFEE /* Example iOS */; + }; + BC4B7DDE2E86D06E00973BD7 /* PBXFileSystemSynchronizedBuildFileExceptionSet */ = { + isa = PBXFileSystemSynchronizedBuildFileExceptionSet; + membershipExceptions = ( + Screencast/SampleHandler.swift, + ); + target = BC4B7DCB2E86D06700973BD7 /* Screencast */; + }; + BC8212582EB8FA8A00419D06 /* PBXFileSystemSynchronizedBuildFileExceptionSet */ = { + isa = PBXFileSystemSynchronizedBuildFileExceptionSet; + membershipExceptions = ( + PlaybackView.swift, + PlaybackViewModel.swift, + PreferenceView.swift, + PreferenceViewModel.swift, + ); + target = BC82124B2EB8F8BE00419D06 /* Example 
tvOS */; + }; + BC912A722D12F0100077EFF3 /* PBXFileSystemSynchronizedBuildFileExceptionSet */ = { + isa = PBXFileSystemSynchronizedBuildFileExceptionSet; + membershipExceptions = ( + ContentView.swift, + HaishinApp.swift, + ); + target = BC7A0E3C2B088FA7005FB2F7 /* Example visionOS */; + }; + BCABAC0D2E65BBBC00B0E878 /* PBXFileSystemSynchronizedBuildFileExceptionSet */ = { + isa = PBXFileSystemSynchronizedBuildFileExceptionSet; + membershipExceptions = ( + PlaybackView.swift, + PlaybackViewModel.swift, + ); + target = BC7A0E3C2B088FA7005FB2F7 /* Example visionOS */; + }; +/* End PBXFileSystemSynchronizedBuildFileExceptionSet section */ + +/* Begin PBXFileSystemSynchronizedRootGroup section */ + BC6841632E3CFBE0008FBFEE /* iOS */ = {isa = PBXFileSystemSynchronizedRootGroup; exceptions = (BC4B7DDD2E86D06E00973BD7 /* PBXFileSystemSynchronizedBuildFileExceptionSet */, BC36B4F82E77201200AADEC1 /* PBXFileSystemSynchronizedBuildFileExceptionSet */, BCABAC0D2E65BBBC00B0E878 /* PBXFileSystemSynchronizedBuildFileExceptionSet */, BC4B7DDE2E86D06E00973BD7 /* PBXFileSystemSynchronizedBuildFileExceptionSet */, BC8212582EB8FA8A00419D06 /* PBXFileSystemSynchronizedBuildFileExceptionSet */, ); explicitFileTypes = {}; explicitFolders = (); path = iOS; sourceTree = ""; }; + BC82124D2EB8F8BE00419D06 /* tvOS */ = {isa = PBXFileSystemSynchronizedRootGroup; explicitFileTypes = {}; explicitFolders = (); path = tvOS; sourceTree = ""; }; + BC9129712D12EA930077EFF3 /* visionOS */ = {isa = PBXFileSystemSynchronizedRootGroup; exceptions = (BC912A722D12F0100077EFF3 /* PBXFileSystemSynchronizedBuildFileExceptionSet */, ); explicitFileTypes = {}; explicitFolders = (); path = visionOS; sourceTree = ""; }; + BCFE627F2E770A9900941209 /* macOS */ = {isa = PBXFileSystemSynchronizedRootGroup; explicitFileTypes = {}; explicitFolders = (); path = macOS; sourceTree = ""; }; +/* End PBXFileSystemSynchronizedRootGroup section */ + +/* Begin PBXFrameworksBuildPhase section */ + BC4B7DC92E86D06700973BD7 /* 
Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + BC4B7DE22E86D13C00973BD7 /* MoQTHaishinKit in Frameworks */, + BC4B7DE62E86D13C00973BD7 /* RTMPHaishinKit in Frameworks */, + BC4B7DE82E86D13C00973BD7 /* SRTHaishinKit in Frameworks */, + BC4B7DE02E86D13C00973BD7 /* HaishinKit in Frameworks */, + BC4B7DE42E86D13C00973BD7 /* RTCHaishinKit in Frameworks */, + BC4B7DCD2E86D06700973BD7 /* ReplayKit.framework in Frameworks */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; + BC68415F2E3CFBE0008FBFEE /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + BC6842B62E3D1294008FBFEE /* HaishinKit in Frameworks */, + BCD2478C2E54C04E00C64280 /* RTCHaishinKit in Frameworks */, + BC6842B82E3D1294008FBFEE /* MoQTHaishinKit in Frameworks */, + BC6842BC2E3D1294008FBFEE /* SRTHaishinKit in Frameworks */, + BC6842BA2E3D1294008FBFEE /* RTMPHaishinKit in Frameworks */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; + BC7A0E3A2B088FA7005FB2F7 /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + BCD917092D3A94BC00D30743 /* MoQTHaishinKit in Frameworks */, + BCEC2BC02E104D9A00422F8F /* RTMPHaishinKit in Frameworks */, + BCD917072D3A94BC00D30743 /* HaishinKit in Frameworks */, + BCD9170B2D3A94BC00D30743 /* SRTHaishinKit in Frameworks */, + BC912A812D12F10C0077EFF3 /* libc++.1.tbd in Frameworks */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; + BC8212492EB8F8BE00419D06 /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + BC82125F2EB8FB1C00419D06 /* RTCHaishinKit in Frameworks */, + BC82125D2EB8FB1500419D06 /* HaishinKit in Frameworks */, + BC8212632EB8FB2400419D06 /* SRTHaishinKit in Frameworks */, + BC8212652EB8FB2D00419D06 /* MoQTHaishinKit in Frameworks */, + BC8212612EB8FB2000419D06 /* RTMPHaishinKit in Frameworks */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; + 
BCFE627B2E770A9900941209 /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + BCFE62912E7710D800941209 /* HaishinKit in Frameworks */, + BCFE62AC2E77179F00941209 /* SRTHaishinKit in Frameworks */, + BCFE62AA2E77179F00941209 /* RTMPHaishinKit in Frameworks */, + BCFE62A82E77179F00941209 /* RTCHaishinKit in Frameworks */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXFrameworksBuildPhase section */ + +/* Begin PBXGroup section */ + 2945CBB31B4BE66000104112 = { + isa = PBXGroup; + children = ( + BC9129382D12EA820077EFF3 /* Preference.swift */, + BC912A762D12F0600077EFF3 /* Frameworks */, + 2945CBBE1B4BE66000104112 /* Products */, + BC6841632E3CFBE0008FBFEE /* iOS */, + BCFE627F2E770A9900941209 /* macOS */, + BC82124D2EB8F8BE00419D06 /* tvOS */, + BC9129712D12EA930077EFF3 /* visionOS */, + ); + sourceTree = ""; + wrapsLines = 0; + }; + 2945CBBE1B4BE66000104112 /* Products */ = { + isa = PBXGroup; + children = ( + BC7A0E3D2B088FA7005FB2F7 /* Example visionOS.app */, + BC6841622E3CFBE0008FBFEE /* Example iOS.app */, + BCFE627E2E770A9900941209 /* Example macOS.app */, + BC4B7DCC2E86D06700973BD7 /* Screencast.appex */, + BC82124C2EB8F8BE00419D06 /* Example tvOS.app */, + ); + name = Products; + sourceTree = ""; + }; + BC9128802D12E7540077EFF3 /* Products */ = { + isa = PBXGroup; + name = Products; + sourceTree = ""; + }; + BC9128FB2D12E9970077EFF3 /* Products */ = { + isa = PBXGroup; + name = Products; + sourceTree = ""; + }; + BC912A762D12F0600077EFF3 /* Frameworks */ = { + isa = PBXGroup; + children = ( + BC912A802D12F0900077EFF3 /* libc++.1.tbd */, + BC912A7F2D12F0830077EFF3 /* libc++.tbd */, + BCD916432D3A934400D30743 /* HaishinKit */, + BCD5C7F92E3F189700A69F16 /* ReplayKit.framework */, + ); + name = Frameworks; + sourceTree = ""; + }; +/* End PBXGroup section */ + +/* Begin PBXNativeTarget section */ + BC4B7DCB2E86D06700973BD7 /* Screencast */ = { + isa = PBXNativeTarget; + buildConfigurationList = 
BC4B7DD62E86D06700973BD7 /* Build configuration list for PBXNativeTarget "Screencast" */; + buildPhases = ( + BC4B7DC82E86D06700973BD7 /* Sources */, + BC4B7DC92E86D06700973BD7 /* Frameworks */, + BC4B7DCA2E86D06700973BD7 /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + ); + name = Screencast; + packageProductDependencies = ( + BC4B7DDF2E86D13C00973BD7 /* HaishinKit */, + BC4B7DE12E86D13C00973BD7 /* MoQTHaishinKit */, + BC4B7DE32E86D13C00973BD7 /* RTCHaishinKit */, + BC4B7DE52E86D13C00973BD7 /* RTMPHaishinKit */, + BC4B7DE72E86D13C00973BD7 /* SRTHaishinKit */, + ); + productName = Screencast; + productReference = BC4B7DCC2E86D06700973BD7 /* Screencast.appex */; + productType = "com.apple.product-type.app-extension"; + }; + BC6841612E3CFBE0008FBFEE /* Example iOS */ = { + isa = PBXNativeTarget; + buildConfigurationList = BC68416B2E3CFBE2008FBFEE /* Build configuration list for PBXNativeTarget "Example iOS" */; + buildPhases = ( + BC68415E2E3CFBE0008FBFEE /* Sources */, + BC68415F2E3CFBE0008FBFEE /* Frameworks */, + BC6841602E3CFBE0008FBFEE /* Resources */, + BCD5C8062E3F189700A69F16 /* Embed Foundation Extensions */, + ); + buildRules = ( + ); + dependencies = ( + BC4B7DD32E86D06700973BD7 /* PBXTargetDependency */, + ); + fileSystemSynchronizedGroups = ( + BC6841632E3CFBE0008FBFEE /* iOS */, + ); + name = "Example iOS"; + packageProductDependencies = ( + BC6842B52E3D1294008FBFEE /* HaishinKit */, + BC6842B72E3D1294008FBFEE /* MoQTHaishinKit */, + BC6842B92E3D1294008FBFEE /* RTMPHaishinKit */, + BC6842BB2E3D1294008FBFEE /* SRTHaishinKit */, + BCD2478B2E54C04E00C64280 /* RTCHaishinKit */, + ); + productName = "Example iOS"; + productReference = BC6841622E3CFBE0008FBFEE /* Example iOS.app */; + productType = "com.apple.product-type.application"; + }; + BC7A0E3C2B088FA7005FB2F7 /* Example visionOS */ = { + isa = PBXNativeTarget; + buildConfigurationList = BC7A0E542B088FA9005FB2F7 /* Build configuration list for PBXNativeTarget "Example visionOS" */; + 
buildPhases = ( + BC7A0E392B088FA7005FB2F7 /* Sources */, + BC7A0E3A2B088FA7005FB2F7 /* Frameworks */, + BC7A0E3B2B088FA7005FB2F7 /* Resources */, + BC7A0E592B0894B9005FB2F7 /* Embed Frameworks */, + ); + buildRules = ( + ); + dependencies = ( + ); + name = "Example visionOS"; + packageProductDependencies = ( + BCD917062D3A94BC00D30743 /* HaishinKit */, + BCD917082D3A94BC00D30743 /* MoQTHaishinKit */, + BCD9170A2D3A94BC00D30743 /* SRTHaishinKit */, + BCEC2BBF2E104D9A00422F8F /* RTMPHaishinKit */, + ); + productName = "Example visionOS"; + productReference = BC7A0E3D2B088FA7005FB2F7 /* Example visionOS.app */; + productType = "com.apple.product-type.application"; + }; + BC82124B2EB8F8BE00419D06 /* Example tvOS */ = { + isa = PBXNativeTarget; + buildConfigurationList = BC8212542EB8F8BF00419D06 /* Build configuration list for PBXNativeTarget "Example tvOS" */; + buildPhases = ( + BC8212482EB8F8BE00419D06 /* Sources */, + BC8212492EB8F8BE00419D06 /* Frameworks */, + BC82124A2EB8F8BE00419D06 /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + ); + fileSystemSynchronizedGroups = ( + BC82124D2EB8F8BE00419D06 /* tvOS */, + ); + name = "Example tvOS"; + packageProductDependencies = ( + BC82125C2EB8FB1500419D06 /* HaishinKit */, + BC82125E2EB8FB1C00419D06 /* RTCHaishinKit */, + BC8212602EB8FB2000419D06 /* RTMPHaishinKit */, + BC8212622EB8FB2400419D06 /* SRTHaishinKit */, + BC8212642EB8FB2D00419D06 /* MoQTHaishinKit */, + ); + productName = "Example tvOS"; + productReference = BC82124C2EB8F8BE00419D06 /* Example tvOS.app */; + productType = "com.apple.product-type.application"; + }; + BCFE627D2E770A9900941209 /* Example macOS */ = { + isa = PBXNativeTarget; + buildConfigurationList = BCFE62872E770A9C00941209 /* Build configuration list for PBXNativeTarget "Example macOS" */; + buildPhases = ( + BCFE627A2E770A9900941209 /* Sources */, + BCFE627B2E770A9900941209 /* Frameworks */, + BCFE627C2E770A9900941209 /* Resources */, + ); + buildRules = ( + ); + dependencies 
= ( + ); + fileSystemSynchronizedGroups = ( + BCFE627F2E770A9900941209 /* macOS */, + ); + name = "Example macOS"; + packageProductDependencies = ( + BCFE62902E7710D800941209 /* HaishinKit */, + BCFE62A72E77179F00941209 /* RTCHaishinKit */, + BCFE62A92E77179F00941209 /* RTMPHaishinKit */, + BCFE62AB2E77179F00941209 /* SRTHaishinKit */, + ); + productName = "Example macOS"; + productReference = BCFE627E2E770A9900941209 /* Example macOS.app */; + productType = "com.apple.product-type.application"; + }; +/* End PBXNativeTarget section */ + +/* Begin PBXProject section */ + 2945CBB41B4BE66000104112 /* Project object */ = { + isa = PBXProject; + attributes = { + BuildIndependentTargetsInParallel = YES; + LastSwiftMigration = 0700; + LastSwiftUpdateCheck = 2600; + LastUpgradeCheck = 1540; + ORGANIZATIONNAME = ""; + TargetAttributes = { + BC4B7DCB2E86D06700973BD7 = { + CreatedOnToolsVersion = 16.4; + }; + BC6841612E3CFBE0008FBFEE = { + CreatedOnToolsVersion = 16.4; + }; + BC7A0E3C2B088FA7005FB2F7 = { + CreatedOnToolsVersion = 15.1; + }; + BC82124B2EB8F8BE00419D06 = { + CreatedOnToolsVersion = 26.0.1; + }; + BCFE627D2E770A9900941209 = { + CreatedOnToolsVersion = 16.4; + }; + }; + }; + buildConfigurationList = 2945CBB71B4BE66000104112 /* Build configuration list for PBXProject "Examples" */; + compatibilityVersion = "Xcode 13.0"; + developmentRegion = en; + hasScannedForEncodings = 0; + knownRegions = ( + en, + Base, + ); + mainGroup = 2945CBB31B4BE66000104112; + packageReferences = ( + ); + productRefGroup = 2945CBBE1B4BE66000104112 /* Products */; + projectDirPath = ""; + projectReferences = ( + { + ProductGroup = BC9128FB2D12E9970077EFF3 /* Products */; + ProjectRef = BC9128F82D12E9970077EFF3 /* Examples.xcodeproj */; + }, + { + ProductGroup = BC9128802D12E7540077EFF3 /* Products */; + ProjectRef = BC91287D2D12E7540077EFF3 /* Examples.xcodeproj */; + }, + ); + projectRoot = ""; + targets = ( + BC6841612E3CFBE0008FBFEE /* Example iOS */, + BCFE627D2E770A9900941209 /* 
Example macOS */, + BC7A0E3C2B088FA7005FB2F7 /* Example visionOS */, + BC4B7DCB2E86D06700973BD7 /* Screencast */, + BC82124B2EB8F8BE00419D06 /* Example tvOS */, + ); + }; +/* End PBXProject section */ + +/* Begin PBXResourcesBuildPhase section */ + BC4B7DCA2E86D06700973BD7 /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; + BC6841602E3CFBE0008FBFEE /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; + BC7A0E3B2B088FA7005FB2F7 /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; + BC82124A2EB8F8BE00419D06 /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; + BCFE627C2E770A9900941209 /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXResourcesBuildPhase section */ + +/* Begin PBXSourcesBuildPhase section */ + BC4B7DC82E86D06700973BD7 /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + BC4B7DE92E86D14A00973BD7 /* Preference.swift in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; + BC68415E2E3CFBE0008FBFEE /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + BC05F96E2E3D248A002BCC92 /* Preference.swift in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; + BC7A0E392B088FA7005FB2F7 /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + BC91298C2D12EB5E0077EFF3 /* Preference.swift in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; + BC8212482EB8F8BE00419D06 /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask 
= 2147483647; + files = ( + BC8212662EB8FB5200419D06 /* Preference.swift in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; + BCFE627A2E770A9900941209 /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + BCFE628F2E7710A100941209 /* Preference.swift in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXSourcesBuildPhase section */ + +/* Begin PBXTargetDependency section */ + BC4B7DD32E86D06700973BD7 /* PBXTargetDependency */ = { + isa = PBXTargetDependency; + target = BC4B7DCB2E86D06700973BD7 /* Screencast */; + targetProxy = BC4B7DD22E86D06700973BD7 /* PBXContainerItemProxy */; + }; +/* End PBXTargetDependency section */ + +/* Begin XCBuildConfiguration section */ + 2945CBD11B4BE66000104112 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; + COPY_PHASE_STRIP = NO; + CURRENT_PROJECT_VERSION = 1; + DEAD_CODE_STRIPPING = YES; + 
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_TESTABILITY = YES; + ENABLE_USER_SCRIPT_SANDBOXING = YES; + EXCLUDED_ARCHS = ""; + GCC_C_LANGUAGE_STANDARD = gnu99; + GCC_DYNAMIC_NO_PIC = NO; + GCC_NO_COMMON_BLOCKS = YES; + GCC_OPTIMIZATION_LEVEL = 0; + GCC_PREPROCESSOR_DEFINITIONS = ( + "DEBUG=1", + "$(inherited)", + ); + GCC_SYMBOLS_PRIVATE_EXTERN = NO; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 13.0; + MACOSX_DEPLOYMENT_TARGET = 10.15; + MTL_ENABLE_DEBUG_INFO = YES; + ONLY_ACTIVE_ARCH = YES; + SDKROOT = ""; + SUPPORTED_PLATFORMS = macosx; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + TARGETED_DEVICE_FAMILY = "1,2"; + TVOS_DEPLOYMENT_TARGET = 13.0; + VERSIONING_SYSTEM = "apple-generic"; + VERSION_INFO_PREFIX = ""; + XROS_DEPLOYMENT_TARGET = 1.0; + }; + name = Debug; + }; + 2945CBD21B4BE66000104112 /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + 
CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; + COPY_PHASE_STRIP = NO; + CURRENT_PROJECT_VERSION = 1; + DEAD_CODE_STRIPPING = YES; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_USER_SCRIPT_SANDBOXING = YES; + GCC_C_LANGUAGE_STANDARD = gnu99; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 13.0; + MACOSX_DEPLOYMENT_TARGET = 10.15; + MTL_ENABLE_DEBUG_INFO = NO; + SDKROOT = ""; + SUPPORTED_PLATFORMS = macosx; + SWIFT_COMPILATION_MODE = wholemodule; + SWIFT_OPTIMIZATION_LEVEL = "-O"; + TARGETED_DEVICE_FAMILY = "1,2"; + TVOS_DEPLOYMENT_TARGET = 13.0; + VALIDATE_PRODUCT = YES; + VERSIONING_SYSTEM = "apple-generic"; + VERSION_INFO_PREFIX = ""; + XROS_DEPLOYMENT_TARGET = 1.0; + }; + name = Release; + }; + BC4B7DD72E86D06700973BD7 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEBUG_INFORMATION_FORMAT = dwarf; + DEVELOPMENT_TEAM = SUEQ2SZ2L5; + GCC_C_LANGUAGE_STANDARD = gnu17; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_FILE = iOS/Screencast/Info.plist; + INFOPLIST_KEY_CFBundleDisplayName = HaishinKit; + INFOPLIST_KEY_NSHumanReadableCopyright = ""; + IPHONEOS_DEPLOYMENT_TARGET = 
15.6; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + "@executable_path/../../Frameworks", + ); + LOCALIZATION_PREFERS_STRING_CATALOGS = YES; + MARKETING_VERSION = 1.0; + MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; + MTL_FAST_MATH = YES; + PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.Example-iOS.Screencast"; + PRODUCT_NAME = "$(TARGET_NAME)"; + SDKROOT = iphoneos; + SKIP_INSTALL = YES; + SUPPORTED_PLATFORMS = "iphoneos iphonesimulator"; + SUPPORTS_MACCATALYST = NO; + SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)"; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Debug; + }; + BC4B7DD82E86D06700973BD7 /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_TEAM = SUEQ2SZ2L5; + GCC_C_LANGUAGE_STANDARD = gnu17; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_FILE = iOS/Screencast/Info.plist; + INFOPLIST_KEY_CFBundleDisplayName = HaishinKit; + INFOPLIST_KEY_NSHumanReadableCopyright = ""; + IPHONEOS_DEPLOYMENT_TARGET = 15.6; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + "@executable_path/../../Frameworks", + ); + LOCALIZATION_PREFERS_STRING_CATALOGS = YES; + MARKETING_VERSION = 1.0; + MTL_FAST_MATH = YES; + PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.Example-iOS.Screencast"; + PRODUCT_NAME = "$(TARGET_NAME)"; + SDKROOT = iphoneos; + SKIP_INSTALL = YES; + SUPPORTED_PLATFORMS = "iphoneos iphonesimulator"; + SUPPORTS_MACCATALYST = NO; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + 
name = Release; + }; + BC68416C2E3CFBE2008FBFEE /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CODE_SIGN_ENTITLEMENTS = iOS/HaishinApp.entitlements; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEBUG_INFORMATION_FORMAT = dwarf; + DEVELOPMENT_TEAM = SUEQ2SZ2L5; + ENABLE_HARDENED_RUNTIME = YES; + ENABLE_PREVIEWS = YES; + GCC_C_LANGUAGE_STANDARD = gnu17; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_FILE = iOS/Info.plist; + INFOPLIST_KEY_CFBundleDisplayName = HaishinKit; + INFOPLIST_KEY_NSCameraUsageDescription = "Camera access is requested for live streaming."; + INFOPLIST_KEY_NSMicrophoneUsageDescription = "Microphone access is requested for live streaming."; + INFOPLIST_KEY_NSPhotoLibraryUsageDescription = "Save the captured video and audio content."; + "INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphoneos*]" = YES; + "INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphonesimulator*]" = YES; + "INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphoneos*]" = YES; + "INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphonesimulator*]" = YES; + "INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphoneos*]" = YES; + "INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphonesimulator*]" = YES; + "INFOPLIST_KEY_UIStatusBarStyle[sdk=iphoneos*]" = UIStatusBarStyleDefault; + "INFOPLIST_KEY_UIStatusBarStyle[sdk=iphonesimulator*]" = UIStatusBarStyleDefault; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait 
UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + IPHONEOS_DEPLOYMENT_TARGET = 16.0; + LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks"; + "LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks"; + LOCALIZATION_PREFERS_STRING_CATALOGS = YES; + MACOSX_DEPLOYMENT_TARGET = 13.5; + MARKETING_VERSION = 1.0; + MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; + MTL_FAST_MATH = YES; + PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.Example-iOS"; + PRODUCT_NAME = "$(TARGET_NAME)"; + REGISTER_APP_GROUPS = YES; + SDKROOT = auto; + SUPPORTED_PLATFORMS = "iphoneos iphonesimulator"; + SUPPORTS_MACCATALYST = NO; + SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)"; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_VERSION = 6.0; + TARGETED_DEVICE_FAMILY = "1,2"; + XROS_DEPLOYMENT_TARGET = 1.0; + }; + name = Debug; + }; + BC68416D2E3CFBE2008FBFEE /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CODE_SIGN_ENTITLEMENTS = iOS/HaishinApp.entitlements; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_TEAM = SUEQ2SZ2L5; + ENABLE_HARDENED_RUNTIME = YES; + ENABLE_PREVIEWS = YES; + GCC_C_LANGUAGE_STANDARD = gnu17; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_FILE = iOS/Info.plist; + INFOPLIST_KEY_CFBundleDisplayName = HaishinKit; + 
INFOPLIST_KEY_NSCameraUsageDescription = "Camera access is requested for live streaming."; + INFOPLIST_KEY_NSMicrophoneUsageDescription = "Microphone access is requested for live streaming."; + INFOPLIST_KEY_NSPhotoLibraryUsageDescription = "Save the captured video and audio content."; + "INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphoneos*]" = YES; + "INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphonesimulator*]" = YES; + "INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphoneos*]" = YES; + "INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphonesimulator*]" = YES; + "INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphoneos*]" = YES; + "INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphonesimulator*]" = YES; + "INFOPLIST_KEY_UIStatusBarStyle[sdk=iphoneos*]" = UIStatusBarStyleDefault; + "INFOPLIST_KEY_UIStatusBarStyle[sdk=iphonesimulator*]" = UIStatusBarStyleDefault; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + IPHONEOS_DEPLOYMENT_TARGET = 16.0; + LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks"; + "LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks"; + LOCALIZATION_PREFERS_STRING_CATALOGS = YES; + MACOSX_DEPLOYMENT_TARGET = 13.5; + MARKETING_VERSION = 1.0; + MTL_FAST_MATH = YES; + PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.Example-iOS"; + PRODUCT_NAME = "$(TARGET_NAME)"; + REGISTER_APP_GROUPS = YES; + SDKROOT = auto; + SUPPORTED_PLATFORMS = "iphoneos iphonesimulator"; + SUPPORTS_MACCATALYST = NO; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_VERSION = 6.0; + TARGETED_DEVICE_FAMILY = "1,2"; + XROS_DEPLOYMENT_TARGET = 1.0; + }; + name = Release; + }; + BC7A0E522B088FA9005FB2F7 /* Debug 
*/ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEBUG_INFORMATION_FORMAT = dwarf; + DEVELOPMENT_TEAM = SUEQ2SZ2L5; + ENABLE_PREVIEWS = YES; + GCC_C_LANGUAGE_STANDARD = gnu17; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_FILE = visionOS/Info.plist; + IPHONEOS_DEPLOYMENT_TARGET = 13.0; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + LOCALIZATION_PREFERS_STRING_CATALOGS = YES; + MACOSX_DEPLOYMENT_TARGET = 10.15; + MARKETING_VERSION = 1.0; + MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; + MTL_FAST_MATH = YES; + PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.Example-visionOS"; + PRODUCT_NAME = "$(TARGET_NAME)"; + SDKROOT = xros; + SUPPORTED_PLATFORMS = "xros xrsimulator"; + SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)"; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_STRICT_CONCURRENCY = complete; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2,7"; + TVOS_DEPLOYMENT_TARGET = 13.0; + XROS_DEPLOYMENT_TARGET = 1.0; + }; + name = Debug; + }; + BC7A0E532B088FA9005FB2F7 /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + 
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_TEAM = SUEQ2SZ2L5; + ENABLE_PREVIEWS = YES; + GCC_C_LANGUAGE_STANDARD = gnu17; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_FILE = Examples/visionOS/Info.plist; + IPHONEOS_DEPLOYMENT_TARGET = 13.0; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + LOCALIZATION_PREFERS_STRING_CATALOGS = YES; + MACOSX_DEPLOYMENT_TARGET = 10.15; + MARKETING_VERSION = 1.0; + MTL_FAST_MATH = YES; + PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.Example-visionOS"; + PRODUCT_NAME = "$(TARGET_NAME)"; + SDKROOT = xros; + SUPPORTED_PLATFORMS = "xros xrsimulator"; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_STRICT_CONCURRENCY = complete; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2,7"; + TVOS_DEPLOYMENT_TARGET = 13.0; + XROS_DEPLOYMENT_TARGET = 1.0; + }; + name = Release; + }; + BC8212552EB8F8BF00419D06 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = "App Icon & Top Shelf Image"; + ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEBUG_INFORMATION_FORMAT = dwarf; + DEVELOPMENT_TEAM = SUEQ2SZ2L5; + ENABLE_PREVIEWS = YES; + GCC_C_LANGUAGE_STANDARD = gnu17; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_KEY_UILaunchScreen_Generation = YES; + INFOPLIST_KEY_UIUserInterfaceStyle = Automatic; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + LOCALIZATION_PREFERS_STRING_CATALOGS = YES; + MARKETING_VERSION = 1.0; + MTL_ENABLE_DEBUG_INFO = 
INCLUDE_SOURCE; + MTL_FAST_MATH = YES; + PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.Example-tvOS"; + PRODUCT_NAME = "$(TARGET_NAME)"; + SDKROOT = appletvos; + STRING_CATALOG_GENERATE_SYMBOLS = YES; + SUPPORTED_PLATFORMS = "appletvos appletvsimulator"; + SUPPORTS_MACCATALYST = NO; + SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)"; + SWIFT_APPROACHABLE_CONCURRENCY = YES; + SWIFT_DEFAULT_ACTOR_ISOLATION = MainActor; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = 3; + TVOS_DEPLOYMENT_TARGET = 15.0; + }; + name = Debug; + }; + BC8212562EB8F8BF00419D06 /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = "App Icon & Top Shelf Image"; + ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_TEAM = SUEQ2SZ2L5; + ENABLE_PREVIEWS = YES; + GCC_C_LANGUAGE_STANDARD = gnu17; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_KEY_UILaunchScreen_Generation = YES; + INFOPLIST_KEY_UIUserInterfaceStyle = Automatic; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + LOCALIZATION_PREFERS_STRING_CATALOGS = YES; + MARKETING_VERSION = 1.0; + MTL_FAST_MATH = YES; + PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.Example-tvOS"; + PRODUCT_NAME = "$(TARGET_NAME)"; + SDKROOT = appletvos; + STRING_CATALOG_GENERATE_SYMBOLS = YES; + SUPPORTED_PLATFORMS = "appletvos appletvsimulator"; + SUPPORTS_MACCATALYST = NO; + SWIFT_APPROACHABLE_CONCURRENCY = YES; + SWIFT_DEFAULT_ACTOR_ISOLATION = MainActor; + 
SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = 3; + TVOS_DEPLOYMENT_TARGET = 15.0; + }; + name = Release; + }; + BCFE62882E770A9C00941209 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CODE_SIGN_ENTITLEMENTS = macOS/HaishinApp.entitlements; + CODE_SIGN_STYLE = Automatic; + COMBINE_HIDPI_IMAGES = YES; + CURRENT_PROJECT_VERSION = 1; + DEBUG_INFORMATION_FORMAT = dwarf; + DEVELOPMENT_ASSET_PATHS = macOS/SampleVideo_360x240_5mb.mp4; + DEVELOPMENT_TEAM = SUEQ2SZ2L5; + ENABLE_HARDENED_RUNTIME = YES; + ENABLE_PREVIEWS = YES; + GCC_C_LANGUAGE_STANDARD = gnu17; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_KEY_NSCameraUsageDescription = ""; + INFOPLIST_KEY_NSHumanReadableCopyright = ""; + INFOPLIST_KEY_NSMicrophoneUsageDescription = ""; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/../Frameworks", + ); + LOCALIZATION_PREFERS_STRING_CATALOGS = YES; + MACOSX_DEPLOYMENT_TARGET = 15.5; + MARKETING_VERSION = 1.0; + MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; + MTL_FAST_MATH = YES; + PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.Example-macOS"; + PRODUCT_NAME = "$(TARGET_NAME)"; + REGISTER_APP_GROUPS = YES; + SDKROOT = macosx; + SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)"; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_VERSION = 5.0; + }; + name = Debug; + }; + BCFE62892E770A9C00941209 /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + 
ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CODE_SIGN_ENTITLEMENTS = macOS/HaishinApp.entitlements; + CODE_SIGN_STYLE = Automatic; + COMBINE_HIDPI_IMAGES = YES; + CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_ASSET_PATHS = macOS/SampleVideo_360x240_5mb.mp4; + DEVELOPMENT_TEAM = SUEQ2SZ2L5; + ENABLE_HARDENED_RUNTIME = YES; + ENABLE_PREVIEWS = YES; + GCC_C_LANGUAGE_STANDARD = gnu17; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_KEY_NSCameraUsageDescription = ""; + INFOPLIST_KEY_NSHumanReadableCopyright = ""; + INFOPLIST_KEY_NSMicrophoneUsageDescription = ""; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/../Frameworks", + ); + LOCALIZATION_PREFERS_STRING_CATALOGS = YES; + MACOSX_DEPLOYMENT_TARGET = 15.5; + MARKETING_VERSION = 1.0; + MTL_FAST_MATH = YES; + PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.Example-macOS"; + PRODUCT_NAME = "$(TARGET_NAME)"; + REGISTER_APP_GROUPS = YES; + SDKROOT = macosx; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_VERSION = 5.0; + }; + name = Release; + }; +/* End XCBuildConfiguration section */ + +/* Begin XCConfigurationList section */ + 2945CBB71B4BE66000104112 /* Build configuration list for PBXProject "Examples" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 2945CBD11B4BE66000104112 /* Debug */, + 2945CBD21B4BE66000104112 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + BC4B7DD62E86D06700973BD7 /* Build configuration list for PBXNativeTarget "Screencast" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + BC4B7DD72E86D06700973BD7 /* Debug */, + BC4B7DD82E86D06700973BD7 /* Release */, + ); + 
defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + BC68416B2E3CFBE2008FBFEE /* Build configuration list for PBXNativeTarget "Example iOS" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + BC68416C2E3CFBE2008FBFEE /* Debug */, + BC68416D2E3CFBE2008FBFEE /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + BC7A0E542B088FA9005FB2F7 /* Build configuration list for PBXNativeTarget "Example visionOS" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + BC7A0E522B088FA9005FB2F7 /* Debug */, + BC7A0E532B088FA9005FB2F7 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + BC8212542EB8F8BF00419D06 /* Build configuration list for PBXNativeTarget "Example tvOS" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + BC8212552EB8F8BF00419D06 /* Debug */, + BC8212562EB8F8BF00419D06 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + BCFE62872E770A9C00941209 /* Build configuration list for PBXNativeTarget "Example macOS" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + BCFE62882E770A9C00941209 /* Debug */, + BCFE62892E770A9C00941209 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; +/* End XCConfigurationList section */ + +/* Begin XCSwiftPackageProductDependency section */ + BC4B7DDF2E86D13C00973BD7 /* HaishinKit */ = { + isa = XCSwiftPackageProductDependency; + productName = HaishinKit; + }; + BC4B7DE12E86D13C00973BD7 /* MoQTHaishinKit */ = { + isa = XCSwiftPackageProductDependency; + productName = MoQTHaishinKit; + }; + BC4B7DE32E86D13C00973BD7 /* RTCHaishinKit */ = { + isa = XCSwiftPackageProductDependency; + productName = RTCHaishinKit; + }; + BC4B7DE52E86D13C00973BD7 /* RTMPHaishinKit */ = { + isa = XCSwiftPackageProductDependency; + productName = RTMPHaishinKit; + }; + BC4B7DE72E86D13C00973BD7 /* 
SRTHaishinKit */ = { + isa = XCSwiftPackageProductDependency; + productName = SRTHaishinKit; + }; + BC6842B52E3D1294008FBFEE /* HaishinKit */ = { + isa = XCSwiftPackageProductDependency; + productName = HaishinKit; + }; + BC6842B72E3D1294008FBFEE /* MoQTHaishinKit */ = { + isa = XCSwiftPackageProductDependency; + productName = MoQTHaishinKit; + }; + BC6842B92E3D1294008FBFEE /* RTMPHaishinKit */ = { + isa = XCSwiftPackageProductDependency; + productName = RTMPHaishinKit; + }; + BC6842BB2E3D1294008FBFEE /* SRTHaishinKit */ = { + isa = XCSwiftPackageProductDependency; + productName = SRTHaishinKit; + }; + BC82125C2EB8FB1500419D06 /* HaishinKit */ = { + isa = XCSwiftPackageProductDependency; + productName = HaishinKit; + }; + BC82125E2EB8FB1C00419D06 /* RTCHaishinKit */ = { + isa = XCSwiftPackageProductDependency; + productName = RTCHaishinKit; + }; + BC8212602EB8FB2000419D06 /* RTMPHaishinKit */ = { + isa = XCSwiftPackageProductDependency; + productName = RTMPHaishinKit; + }; + BC8212622EB8FB2400419D06 /* SRTHaishinKit */ = { + isa = XCSwiftPackageProductDependency; + productName = SRTHaishinKit; + }; + BC8212642EB8FB2D00419D06 /* MoQTHaishinKit */ = { + isa = XCSwiftPackageProductDependency; + productName = MoQTHaishinKit; + }; + BCD2478B2E54C04E00C64280 /* RTCHaishinKit */ = { + isa = XCSwiftPackageProductDependency; + productName = RTCHaishinKit; + }; + BCD917062D3A94BC00D30743 /* HaishinKit */ = { + isa = XCSwiftPackageProductDependency; + productName = HaishinKit; + }; + BCD917082D3A94BC00D30743 /* MoQTHaishinKit */ = { + isa = XCSwiftPackageProductDependency; + productName = MoQTHaishinKit; + }; + BCD9170A2D3A94BC00D30743 /* SRTHaishinKit */ = { + isa = XCSwiftPackageProductDependency; + productName = SRTHaishinKit; + }; + BCEC2BBF2E104D9A00422F8F /* RTMPHaishinKit */ = { + isa = XCSwiftPackageProductDependency; + productName = RTMPHaishinKit; + }; + BCFE62902E7710D800941209 /* HaishinKit */ = { + isa = XCSwiftPackageProductDependency; + productName = 
HaishinKit; + }; + BCFE62A72E77179F00941209 /* RTCHaishinKit */ = { + isa = XCSwiftPackageProductDependency; + productName = RTCHaishinKit; + }; + BCFE62A92E77179F00941209 /* RTMPHaishinKit */ = { + isa = XCSwiftPackageProductDependency; + productName = RTMPHaishinKit; + }; + BCFE62AB2E77179F00941209 /* SRTHaishinKit */ = { + isa = XCSwiftPackageProductDependency; + productName = SRTHaishinKit; + }; +/* End XCSwiftPackageProductDependency section */ + }; + rootObject = 2945CBB41B4BE66000104112 /* Project object */; +} diff --git a/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/project.xcworkspace/contents.xcworkspacedata b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/project.xcworkspace/contents.xcworkspacedata new file mode 100644 index 000000000..919434a62 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/project.xcworkspace/contents.xcworkspacedata @@ -0,0 +1,7 @@ + + + + + diff --git a/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist new file mode 100644 index 000000000..18d981003 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist @@ -0,0 +1,8 @@ + + + + + IDEDidComputeMac32BitWarning + + + diff --git a/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved new file mode 100644 index 000000000..6db9ce555 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved @@ -0,0 +1,33 @@ +{ + "originHash" : "0623d92bee87e5013eb796ad9f159d469c5255adc372c70de458149479be7518", + "pins" : [ + { + "identity" : "logboard", + "kind" : "remoteSourceControl", + 
"location" : "https://github.com/shogo4405/Logboard.git", + "state" : { + "revision" : "8f41c63afb903040b77049ee2efa8c257b8c0d50", + "version" : "2.6.0" + } + }, + { + "identity" : "swift-docc-plugin", + "kind" : "remoteSourceControl", + "location" : "https://github.com/swiftlang/swift-docc-plugin", + "state" : { + "revision" : "3e4f133a77e644a5812911a0513aeb7288b07d06", + "version" : "1.4.5" + } + }, + { + "identity" : "swift-docc-symbolkit", + "kind" : "remoteSourceControl", + "location" : "https://github.com/swiftlang/swift-docc-symbolkit", + "state" : { + "revision" : "b45d1f2ed151d057b54504d653e0da5552844e34", + "version" : "1.0.0" + } + } + ], + "version" : 3 +} diff --git a/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Example iOS.xcscheme b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Example iOS.xcscheme new file mode 100644 index 000000000..8e4e9c162 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Example iOS.xcscheme @@ -0,0 +1,80 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Example macOS.xcscheme b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Example macOS.xcscheme new file mode 100644 index 000000000..6d4c5ded4 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Example macOS.xcscheme @@ -0,0 +1,78 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Example tvOS.xcscheme b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Example tvOS.xcscheme new file mode 100644 index 000000000..7ae70b686 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Example tvOS.xcscheme @@ -0,0 +1,78 @@ + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + diff --git a/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Example visionOS.xcscheme b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Example visionOS.xcscheme new file mode 100644 index 000000000..ae150d948 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Example visionOS.xcscheme @@ -0,0 +1,78 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Screencast.xcscheme b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Screencast.xcscheme new file mode 100644 index 000000000..af14ce15c --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/Examples.xcodeproj/xcshareddata/xcschemes/Screencast.xcscheme @@ -0,0 +1,97 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Vendor/HaishinKit.swift/Examples/Package.swift b/Vendor/HaishinKit.swift/Examples/Package.swift new file mode 100644 index 000000000..47e7f05fc --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/Package.swift @@ -0,0 +1,11 @@ +// swift-tools-version:5.10 + +// Leave blank. This is only here so that Xcode doesn't display it. + +import PackageDescription + +let package = Package( + name: "Examples", + products: [], + targets: [] +) diff --git a/Vendor/HaishinKit.swift/Examples/Preference.swift b/Vendor/HaishinKit.swift/Examples/Preference.swift new file mode 100644 index 000000000..a7abd005a --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/Preference.swift @@ -0,0 +1,17 @@ +import Foundation + +struct Preference: Sendable { + // Temp + static nonisolated(unsafe) var `default` = Preference() + + // var uri = "http://192.168.1.14:1985/rtc/v1/whip/?app=live&stream=livestream" + var uri = "rtmp://192.168.1.7/live" + var streamName = "live" + + func makeURL() -> URL? 
{ + if uri.contains("rtmp://") { + return URL(string: uri + "/" + streamName) + } + return URL(string: uri) + } +} diff --git a/Vendor/HaishinKit.swift/Examples/iOS/Assets.xcassets/AccentColor.colorset/Contents.json b/Vendor/HaishinKit.swift/Examples/iOS/Assets.xcassets/AccentColor.colorset/Contents.json new file mode 100644 index 000000000..eb8789700 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/iOS/Assets.xcassets/AccentColor.colorset/Contents.json @@ -0,0 +1,11 @@ +{ + "colors" : [ + { + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Vendor/HaishinKit.swift/Examples/iOS/Assets.xcassets/AppIcon.appiconset/AppIcon.png b/Vendor/HaishinKit.swift/Examples/iOS/Assets.xcassets/AppIcon.appiconset/AppIcon.png new file mode 100644 index 000000000..62e3de039 Binary files /dev/null and b/Vendor/HaishinKit.swift/Examples/iOS/Assets.xcassets/AppIcon.appiconset/AppIcon.png differ diff --git a/Vendor/HaishinKit.swift/Examples/iOS/Assets.xcassets/AppIcon.appiconset/Contents.json b/Vendor/HaishinKit.swift/Examples/iOS/Assets.xcassets/AppIcon.appiconset/Contents.json new file mode 100644 index 000000000..5081081c5 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/iOS/Assets.xcassets/AppIcon.appiconset/Contents.json @@ -0,0 +1,86 @@ +{ + "images" : [ + { + "filename" : "AppIcon.png", + "idiom" : "universal", + "platform" : "ios", + "size" : "1024x1024" + }, + { + "appearances" : [ + { + "appearance" : "luminosity", + "value" : "dark" + } + ], + "idiom" : "universal", + "platform" : "ios", + "size" : "1024x1024" + }, + { + "appearances" : [ + { + "appearance" : "luminosity", + "value" : "tinted" + } + ], + "idiom" : "universal", + "platform" : "ios", + "size" : "1024x1024" + }, + { + "idiom" : "mac", + "scale" : "1x", + "size" : "16x16" + }, + { + "idiom" : "mac", + "scale" : "2x", + "size" : "16x16" + }, + { + "idiom" : "mac", + "scale" : "1x", + "size" : "32x32" + }, + { + "idiom" : "mac", + "scale" : "2x", + "size" 
: "32x32" + }, + { + "idiom" : "mac", + "scale" : "1x", + "size" : "128x128" + }, + { + "idiom" : "mac", + "scale" : "2x", + "size" : "128x128" + }, + { + "idiom" : "mac", + "scale" : "1x", + "size" : "256x256" + }, + { + "idiom" : "mac", + "scale" : "2x", + "size" : "256x256" + }, + { + "idiom" : "mac", + "scale" : "1x", + "size" : "512x512" + }, + { + "idiom" : "mac", + "scale" : "2x", + "size" : "512x512" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Vendor/HaishinKit.swift/Examples/iOS/Assets.xcassets/Contents.json b/Vendor/HaishinKit.swift/Examples/iOS/Assets.xcassets/Contents.json new file mode 100644 index 000000000..73c00596a --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/iOS/Assets.xcassets/Contents.json @@ -0,0 +1,6 @@ +{ + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Vendor/HaishinKit.swift/Examples/iOS/AudioEngineCapture.swift b/Vendor/HaishinKit.swift/Examples/iOS/AudioEngineCapture.swift new file mode 100644 index 000000000..b8a675431 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/iOS/AudioEngineCapture.swift @@ -0,0 +1,62 @@ +import AVFoundation +import Foundation +import HaishinKit + +protocol AudioEngineCaptureDelegate: AnyObject { + func audioCapture(_ audioCapture: AudioEngineCapture, buffer: AVAudioPCMBuffer, time: AVAudioTime) +} + +final class AudioEngineCapture { + var delegate: (any AudioEngineCaptureDelegate)? 
+ + private(set) var isRunning = false + private var audioEngine = AVAudioEngine() + + func startCaptureIfNeeded() { + guard isRunning else { + return + } + audioEngine.stop() + audioEngine.inputNode.removeTap(onBus: 0) + audioEngine = AVAudioEngine() + do { + try startCapture() + } catch { + logger.warn(error) + } + } + + private func startCapture() throws { + let input = audioEngine.inputNode + let mixer = audioEngine.mainMixerNode + audioEngine.connect(input, to: mixer, format: input.inputFormat(forBus: 0)) + input.installTap(onBus: 0, bufferSize: 1024, format: input.inputFormat(forBus: 0)) { buffer, when in + self.delegate?.audioCapture(self, buffer: buffer, time: when) + } + audioEngine.prepare() + try audioEngine.start() + } +} + +extension AudioEngineCapture: Runner { + // MARK: Runner + func startRunning() { + guard !isRunning else { + return + } + do { + try startCapture() + isRunning = true + } catch { + logger.error(error) + } + } + + func stopRunning() { + guard isRunning else { + return + } + audioEngine.stop() + isRunning = false + } +} diff --git a/Vendor/HaishinKit.swift/Examples/iOS/AudioSourceService.swift b/Vendor/HaishinKit.swift/Examples/iOS/AudioSourceService.swift new file mode 100644 index 000000000..dffd10c77 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/iOS/AudioSourceService.swift @@ -0,0 +1,218 @@ +@preconcurrency import AVFoundation +import Combine +import HaishinKit + +struct AudioSource: Sendable, Hashable, Equatable, CustomStringConvertible { + static let empty = AudioSource(portName: "", dataSourceName: "", isSupportedStereo: false) + + let portName: String + let dataSourceName: String + let isSupportedStereo: Bool + + var description: String { + if isSupportedStereo { + return "\(portName)(\(dataSourceName))(Stereo)" + } + return "\(portName)(\(dataSourceName))(Mono)" + } +} + +actor AudioSourceService { + enum Error: Swift.Error { + case missingDataSource(_ source: AudioSource) + } + + var buffer: 
AsyncStream<(AVAudioPCMBuffer, AVAudioTime)> { + AsyncStream { continuation in + bufferContinuation = continuation + } + } + + private(set) var mode: AudioSourceServiceMode = .audioEngine + private(set) var isRunning = false + private(set) var sources: [AudioSource] = [] { + didSet { + guard sources != oldValue else { + return + } + continuation?.yield(sources) + } + } + private let session = AVAudioSession.sharedInstance() + private var continuation: AsyncStream<[AudioSource]>.Continuation? { + didSet { + oldValue?.finish() + } + } + private var tasks: [Task] = [] + private var audioEngineCapture: AudioEngineCapture? { + didSet { + audioEngineCapture?.delegate = self + } + } + private var bufferContinuation: AsyncStream<(AVAudioPCMBuffer, AVAudioTime)>.Continuation? { + didSet { + oldValue?.finish() + } + } + + func setUp(_ mode: AudioSourceServiceMode) { + self.mode = mode + do { + let session = AVAudioSession.sharedInstance() + // If you set the "mode" parameter, stereo capture is not possible, so it is left unspecified. + try session.setCategory(.playAndRecord, mode: .default, options: [.defaultToSpeaker, .allowBluetoothHFP]) + try session.setActive(true) + // It looks like this setting is required on iOS 18.5. + try? 
session.setPreferredInputNumberOfChannels(2) + } catch { + logger.error(error) + } + } + + func sourcesUpdates() -> AsyncStream<[AudioSource]> { + AsyncStream { continuation in + self.continuation = continuation + continuation.yield(sources) + } + } + + func selectAudioSource(_ audioSource: AudioSource) throws { + setPreferredInputBuiltInMic(true) + guard let preferredInput = AVAudioSession.sharedInstance().preferredInput, + let dataSources = preferredInput.dataSources, + let newDataSource = dataSources.first(where: { $0.dataSourceName == audioSource.dataSourceName }), + let supportedPolarPatterns = newDataSource.supportedPolarPatterns else { + throw Error.missingDataSource(audioSource) + } + do { + let isStereoSupported = supportedPolarPatterns.contains(.stereo) + if isStereoSupported { + try newDataSource.setPreferredPolarPattern(.stereo) + } + try preferredInput.setPreferredDataSource(newDataSource) + } catch { + logger.warn(error) + } + } + + private func makeAudioSources() -> [AudioSource] { + if session.inputDataSources?.isEmpty == true { + setPreferredInputBuiltInMic(false) + } else { + setPreferredInputBuiltInMic(true) + } + guard let preferredInput = session.preferredInput else { + return [] + } + var sources: [AudioSource] = [] + for dataSource in session.preferredInput?.dataSources ?? [] { + sources.append(.init( + portName: preferredInput.portName, + dataSourceName: dataSource.dataSourceName, + isSupportedStereo: dataSource.supportedPolarPatterns?.contains(.stereo) ?? 
false + )) + } + return sources + } + + private func setPreferredInputBuiltInMic(_ isEnabled: Bool) { + do { + if isEnabled { + guard let availableInputs = session.availableInputs, + let builtInMicInput = availableInputs.first(where: { $0.portType == .builtInMic }) else { + return + } + try session.setPreferredInput(builtInMicInput) + } else { + try session.setPreferredInput(nil) + } + } catch { + logger.warn(error) + } + } +} + +extension AudioSourceService: AsyncRunner { + // MARK: AsyncRunner + func startRunning() async { + guard !isRunning else { + return + } + switch mode { + case .audioSource: + break + case .audioSourceWithStereo: + sources = makeAudioSources() + tasks.append(Task { + for await reason in NotificationCenter.default.notifications(named: AVAudioSession.routeChangeNotification) + .compactMap({ $0.userInfo?[AVAudioSessionRouteChangeReasonKey] as? UInt }) + .compactMap({ AVAudioSession.RouteChangeReason(rawValue: $0) }) { + logger.info("route change ->", reason.rawValue) + sources = makeAudioSources() + } + }) + case .audioEngine: + audioEngineCapture = AudioEngineCapture() + audioEngineCapture?.startRunning() + tasks.append(Task { + for await reason in NotificationCenter.default.notifications(named: AVAudioSession.routeChangeNotification) + .compactMap({ $0.userInfo?[AVAudioSessionRouteChangeReasonKey] as? UInt }) + .compactMap({ AVAudioSession.RouteChangeReason(rawValue: $0) }) { + // There are cases where it crashes when executed in situations other than attaching or detaching earphones. 
https://github.com/HaishinKit/HaishinKit.swift/issues/1863 + switch reason { + case .newDeviceAvailable, .oldDeviceUnavailable: + audioEngineCapture?.startCaptureIfNeeded() + default: () + } + } + }) + tasks.append(Task { + for await notification in NotificationCenter.default.notifications( + named: AVAudioSession.interruptionNotification, + object: AVAudioSession.sharedInstance() + ) { + guard + let userInfo = notification.userInfo, + let typeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt, + let type = AVAudioSession.InterruptionType(rawValue: typeValue) else { + return + } + switch type { + case .began: + logger.info("interruption began", notification) + case .ended: + logger.info("interruption end", notification) + let optionsValue = + userInfo[AVAudioSessionInterruptionOptionKey] as? UInt + let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue ?? 0) + if options.contains(.shouldResume) { + audioEngineCapture?.startCaptureIfNeeded() + } + default: () + } + } + }) + } + isRunning = true + } + + func stopRunning() async { + guard isRunning else { + return + } + audioEngineCapture?.stopRunning() + tasks.forEach { $0.cancel() } + tasks.removeAll() + isRunning = false + } +} + +extension AudioSourceService: AudioEngineCaptureDelegate { + // MARK: AudioEngineCaptureDelegate + nonisolated func audioCapture(_ audioCapture: AudioEngineCapture, buffer: AVAudioPCMBuffer, time: AVAudioTime) { + Task { + await bufferContinuation?.yield((buffer, time)) + } + } +} diff --git a/Vendor/HaishinKit.swift/Examples/iOS/ContentView.swift b/Vendor/HaishinKit.swift/Examples/iOS/ContentView.swift new file mode 100644 index 000000000..e0e2608c6 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/iOS/ContentView.swift @@ -0,0 +1,33 @@ +import SwiftUI + +struct ContentView: View { + var body: some View { + TabView { + PreferenceView() + .tabItem { + Image(systemName: "person.circle") + Text("Preference") + } + + PublishView() + .tabItem { + 
Image(systemName: "record.circle") + Text("Publish") + } + + if #available(iOS 17.0, *), UIDevice.current.userInterfaceIdiom == .pad { + UVCView() + .tabItem { + Image(systemName: "record.circle") + Text("UVC Camera") + } + } + + PlaybackView() + .tabItem { + Image(systemName: "play.circle") + Text("Playback") + } + } + } +} diff --git a/Vendor/HaishinKit.swift/Examples/iOS/HaishinApp.entitlements b/Vendor/HaishinKit.swift/Examples/iOS/HaishinApp.entitlements new file mode 100644 index 000000000..f2ef3ae02 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/iOS/HaishinApp.entitlements @@ -0,0 +1,10 @@ + + + + + com.apple.security.app-sandbox + + com.apple.security.files.user-selected.read-only + + + diff --git a/Vendor/HaishinKit.swift/Examples/iOS/HaishinApp.swift b/Vendor/HaishinKit.swift/Examples/iOS/HaishinApp.swift new file mode 100644 index 000000000..5265b724a --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/iOS/HaishinApp.swift @@ -0,0 +1,65 @@ +import HaishinKit +@preconcurrency import Logboard +import RTCHaishinKit +import RTMPHaishinKit +import SRTHaishinKit +import SwiftUI + +nonisolated let logger = LBLogger.with("com.haishinkit.HaishinApp") + +@main +struct HaishinApp: App { + @State private var preference = PreferenceViewModel() + @State private var isInitialized = false + + var body: some Scene { + WindowGroup { + if isInitialized { + ContentView() + .environmentObject(preference) + } else { + LaunchScreen() + .task { + await initialize() + isInitialized = true + } + } + } + } + + private func initialize() async { + await SessionBuilderFactory.shared.register(RTMPSessionFactory()) + await SessionBuilderFactory.shared.register(SRTSessionFactory()) + await SessionBuilderFactory.shared.register(HTTPSessionFactory()) + + await RTCLogger.shared.setLevel(.debug) + await SRTLogger.shared.setLevel(.debug) + } + + init() { + LBLogger(kHaishinKitIdentifier).level = .debug + LBLogger(kRTCHaishinKitIdentifier).level = .debug + 
LBLogger(kRTMPHaishinKitIdentifier).level = .debug + LBLogger(kSRTHaishinKitIdentifier).level = .debug + } +} + +struct LaunchScreen: View { + var body: some View { + ZStack { + Color.black.ignoresSafeArea() + VStack(spacing: 20) { + Image(systemName: "video.fill") + .font(.system(size: 60)) + .foregroundColor(.white) + Text("HaishinKit") + .font(.title) + .fontWeight(.bold) + .foregroundColor(.white) + ProgressView() + .tint(.white) + .padding(.top, 20) + } + } + } +} diff --git a/Vendor/HaishinKit.swift/Examples/iOS/Info.plist b/Vendor/HaishinKit.swift/Examples/iOS/Info.plist new file mode 100644 index 000000000..515c9376f --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/iOS/Info.plist @@ -0,0 +1,11 @@ + + + + + UIBackgroundModes + + audio + voip + + + diff --git a/Vendor/HaishinKit.swift/Examples/iOS/InfoGuideView.swift b/Vendor/HaishinKit.swift/Examples/iOS/InfoGuideView.swift new file mode 100644 index 000000000..cd7b5db77 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/iOS/InfoGuideView.swift @@ -0,0 +1,151 @@ +import SwiftUI + +private enum InfoTab: String, CaseIterable { + case preference = "Preference" + case publish = "Publish" +} + +struct InfoGuideView: View { + @Binding var showingInfo: Bool + @State private var selectedTab: InfoTab = .preference + + var body: some View { + NavigationView { + VStack(spacing: 0) { + Picker("", selection: $selectedTab) { + ForEach(InfoTab.allCases, id: \.self) { tab in + Text(tab.rawValue).tag(tab) + } + } + .pickerStyle(.segmented) + .padding() + .padding(.top, 8) + + TabView(selection: $selectedTab) { + PreferenceGuideList() + .tag(InfoTab.preference) + PublishGuideList() + .tag(InfoTab.publish) + } + .tabViewStyle(.page(indexDisplayMode: .never)) + } + .navigationTitle("Help") + .navigationBarTitleDisplayMode(.inline) + .toolbar { + ToolbarItem(placement: .navigationBarTrailing) { + Button("Done") { showingInfo = false } + } + } + } + } +} + +private struct PreferenceGuideList: View { + var body: some View { 
+ List { + Section("Stream Settings") { + GuideRow(title: "URL", description: "RTMP server address (e.g., rtmp://your-server.com/live)") + GuideRow(title: "Stream Name", description: "Unique stream key provided by your streaming platform") + } + Section("Audio Settings") { + GuideRow(title: "Format", description: "AAC: Universal compatibility\nOpus: Better quality at low bitrates") + } + Section("Video Settings") { + GuideRow(title: "HDR Video", description: "Captures wider color/brightness range. Requires HDR-capable camera.") + GuideRow(title: "Low Latency", description: "Reduces stream delay to ~2-3 seconds. May slightly reduce quality.") + GuideRow(title: "BitRate Mode", description: "Average: Consistent file size\nConstant: Stable quality\nVariable: Best quality") + } + Section("Capture Settings") { + GuideRow(title: "Preview Type", description: "Metal: Fast GPU-based preview.\nSystem PiP: Enables background streaming. When you switch apps, receive a phone call, or go to home screen, your stream continues in a floating window instead of dying.") + GuideRow(title: "Audio Capture", description: "AudioEngine: Most stable\nAudioSource: Direct capture\nStereo: For external mics") + GuideRow(title: "GPU Rendering", description: "Uses GPU for video effects. Disable if experiencing issues.") + } + Section("Debug") { + GuideRow(title: "Memory Release Test", description: "Opens PublishView in a sheet to verify memory is properly released when dismissed to help detect memory leaks.") + } + } + } +} + +private struct PublishGuideList: View { + var body: some View { + List { + Section("Stream Settings") { + GuideRowWithIcon(icon: "15", isText: true, title: "FPS", + description: "Frames per second. 15 saves battery, 30 is standard, 60 is ultra-smooth.") + GuideRowWithIcon(icon: "slider.horizontal.3", title: "Bitrate (kbps)", + description: "Video quality. Higher = better but more data. 
1500-2500 recommended.") + GuideRowWithIcon(icon: "rectangle.badge.checkmark", title: "720p", + description: "Video resolution (1280×720). Good balance of quality and performance.") + } + Section("Controls") { + GuideRowWithIcon(icon: "record.circle", title: "Record", + description: "Save a local copy to Photos. Only available while streaming.") + GuideRowWithIcon(icon: "mic.fill", title: "Mute", + description: "Mute/unmute microphone. Red when muted.") + GuideRowWithIcon(icon: "arrow.triangle.2.circlepath.camera", title: "Flip Camera", + description: "Switch between front and back cameras.") + GuideRowWithIcon(icon: "flashlight.on.fill", title: "Torch", + description: "Toggle flashlight. Only works with back camera.") + GuideRowWithIcon(icon: "rectangle.on.rectangle", title: "Dual Camera", + description: "Overlay the other camera in your stream. Viewers see both cameras.") + } + Section("Live Stats") { + GuideRowWithIcon(icon: "arrow.up", title: "Upload Speed", + description: "Current upload rate in KB/s. The graph shows last 60 seconds.") + GuideRowWithIcon(icon: "thermometer.medium", title: "Temperature", + description: "Device thermal state. 
Lower FPS/bitrate if too hot.") + } + } + } +} + +private struct GuideRow: View { + let title: String + let description: String + + var body: some View { + VStack(alignment: .leading, spacing: 4) { + Text(title).font(.headline) + Text(description) + .font(.caption) + .foregroundColor(.secondary) + } + .padding(.vertical, 2) + } +} + +private struct GuideRowWithIcon: View { + let icon: String + var isText: Bool = false + let title: String + let description: String + + var body: some View { + HStack(alignment: .top, spacing: 12) { + if isText { + Text(icon) + .font(.system(size: 14, weight: .bold)) + .foregroundColor(.cyan) + .frame(width: 28, height: 28) + .background(Color.cyan.opacity(0.2)) + .cornerRadius(6) + } else { + Image(systemName: icon) + .font(.system(size: 16)) + .foregroundColor(.cyan) + .frame(width: 28, height: 28) + .background(Color.cyan.opacity(0.2)) + .cornerRadius(6) + } + VStack(alignment: .leading, spacing: 2) { + Text(title) + .font(.subheadline.weight(.medium)) + Text(description) + .font(.caption) + .foregroundColor(.secondary) + } + } + .padding(.vertical, 4) + } +} diff --git a/Vendor/HaishinKit.swift/Examples/iOS/PlaybackView.swift b/Vendor/HaishinKit.swift/Examples/iOS/PlaybackView.swift new file mode 100644 index 000000000..05704311b --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/iOS/PlaybackView.swift @@ -0,0 +1,112 @@ +import AVFoundation +import HaishinKit +import SwiftUI + +struct PlaybackView: View { + @EnvironmentObject var preference: PreferenceViewModel + @StateObject private var model = PlaybackViewModel() + + var body: some View { + ZStack { + VStack { + switch preference.viewType { + case .metal: + MTHKViewRepresentable(previewSource: model, videoGravity: .resizeAspectFill) + case .pip: + PiPHKViewRepresentable(previewSource: model, videoGravity: .resizeAspectFill) + } + } + + VStack { + Spacer() + + if model.hasError { + VStack(spacing: 16) { + Image(systemName: "tv.slash") + .font(.system(size: 48)) + 
.foregroundColor(.white.opacity(0.7)) + + Text("Can't connect to stream") + .font(.headline) + .foregroundColor(.white) + + Text(model.friendlyErrorMessage) + .font(.subheadline) + .foregroundColor(.white.opacity(0.8)) + .multilineTextAlignment(.center) + .padding(.horizontal, 32) + + Button(action: { + model.dismissError() + }) { + Text("Try Again") + .font(.subheadline.bold()) + .foregroundColor(.white) + .padding(.horizontal, 24) + .padding(.vertical, 12) + .background(Color.blue) + .cornerRadius(8) + } + } + .padding(24) + .background(Color.black.opacity(0.8)) + .cornerRadius(16) + } + + Spacer() + + HStack { + Spacer() + switch model.readyState { + case .connecting: + ProgressView() + .progressViewStyle(CircularProgressViewStyle(tint: .white)) + .scaleEffect(1.5) + .frame(width: 64, height: 64) + .background(Color.black.opacity(0.5)) + .cornerRadius(32) + .padding(16) + case .open: + Button(action: { + Task { + await model.stop() + } + }) { + Image(systemName: "stop.fill") + .foregroundColor(.white) + .font(.system(size: 24)) + } + .frame(width: 64, height: 64) + .background(Color.red) + .cornerRadius(32) + .padding(16) + case .closed, .closing: + if !model.hasError { + Button(action: { + Task { + await model.start() + } + }) { + Image(systemName: "play.fill") + .foregroundColor(.white) + .font(.system(size: 24)) + } + .frame(width: 64, height: 64) + .background(Color.blue) + .cornerRadius(32) + .padding(16) + } + } + } + } + } + .background(Color.black) + .task { + await model.makeSession() + } + } +} + +#Preview { + PlaybackView() +} diff --git a/Vendor/HaishinKit.swift/Examples/iOS/PlaybackViewModel.swift b/Vendor/HaishinKit.swift/Examples/iOS/PlaybackViewModel.swift new file mode 100644 index 000000000..dcf0ee07c --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/iOS/PlaybackViewModel.swift @@ -0,0 +1,136 @@ +@preconcurrency import AVKit +import Combine +import HaishinKit +@preconcurrency import Logboard +import SwiftUI + +@MainActor +final class 
PlaybackViewModel: ObservableObject { + @Published private(set) var readyState: SessionReadyState = .closed + @Published private(set) var error: Error? + @Published var hasError = false + + var friendlyErrorMessage: String { + guard let error else { + return "Something went wrong. Please check your connection and try again." + } + + let errorString = String(describing: error).lowercased() + + if errorString.contains("unsupportedcommand") || errorString.contains("error 1") { + return "This server doesn't support watching streams directly. Most streaming servers (like Owncast) require you to watch via a web browser instead." + } else if errorString.contains("timeout") || errorString.contains("timedout") { + return "Connection timed out. The server may be offline or the stream URL might be incorrect." + } else if errorString.contains("invalidstate") { + return "Unable to connect. Please check that a stream is currently live." + } else if errorString.contains("connection") { + return "Couldn't reach the server. Check your internet connection and verify the stream URL in Preferences." + } else { + return "Unable to play this stream. The server may not support direct playback, or no stream is currently live." + } + } + + func dismissError() { + hasError = false + error = nil + } + + private var view: PiPHKView? + private var session: (any Session)? + private let audioPlayer = AudioPlayer(audioEngine: AVAudioEngine()) + private var pictureInPictureController: AVPictureInPictureController? 
+ + func start() async { + guard let session else { + return + } + do { + try await session.connect { + Task { @MainActor in + self.hasError = true + } + } + } catch { + self.error = error + self.hasError = true + } + } + + func stop() async { + do { + try await session?.close() + } catch { + logger.error(error) + } + } + + func makeSession() async { + do { + session = try await SessionBuilderFactory.shared.make(Preference.default.makeURL()) + .setMode(.playback) + .build() + await session?.setMaxRetryCount(0) + guard let session else { + return + } + if let view { + await session.stream.addOutput(view) + } + await session.stream.attachAudioPlayer(audioPlayer) + Task { + for await readyState in await session.readyState { + self.readyState = readyState + switch readyState { + case .open: + UIApplication.shared.isIdleTimerDisabled = false + default: + UIApplication.shared.isIdleTimerDisabled = true + } + } + } + } catch { + logger.error(error) + } + } +} + +extension PlaybackViewModel: MTHKViewRepresentable.PreviewSource { + // MARK: MTHKViewRepresentable.PreviewSource + nonisolated func connect(to view: MTHKView) { + Task { @MainActor in + } + } +} + +extension PlaybackViewModel: PiPHKViewRepresentable.PreviewSource { + // MARK: PiPHKSwiftUiView.PreviewSource + nonisolated func connect(to view: HaishinKit.PiPHKView) { + Task { @MainActor in + self.view = view + if pictureInPictureController == nil { + pictureInPictureController = AVPictureInPictureController(contentSource: .init(sampleBufferDisplayLayer: view.layer, playbackDelegate: PlaybackDelegate())) + } + } + } +} + +final class PlaybackDelegate: NSObject, AVPictureInPictureSampleBufferPlaybackDelegate { + // MARK: AVPictureInPictureControllerDelegate + func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, setPlaying playing: Bool) { + } + + func pictureInPictureControllerTimeRangeForPlayback(_ pictureInPictureController: AVPictureInPictureController) -> CMTimeRange { + 
return CMTimeRange(start: .zero, duration: .positiveInfinity) + } + + func pictureInPictureControllerIsPlaybackPaused(_ pictureInPictureController: AVPictureInPictureController) -> Bool { + return false + } + + func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, didTransitionToRenderSize newRenderSize: CMVideoDimensions) { + } + + func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, skipByInterval skipInterval: CMTime, completion completionHandler: @escaping () -> Void) { + completionHandler() + } +} diff --git a/Vendor/HaishinKit.swift/Examples/iOS/PreferenceView.swift b/Vendor/HaishinKit.swift/Examples/iOS/PreferenceView.swift new file mode 100644 index 000000000..8cb7de1da --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/iOS/PreferenceView.swift @@ -0,0 +1,118 @@ +import HaishinKit +import SwiftUI + +struct InfoRow: View { + let title: String + let info: String + + var body: some View { + HStack { + Text(title) + Spacer() + Image(systemName: "info.circle") + .foregroundColor(.blue) + } + .contentShape(Rectangle()) + } +} + +struct PreferenceView: View { + @EnvironmentObject var model: PreferenceViewModel + @State private var showingInfo = false + + var body: some View { + Form { + Section { + HStack { + Text("URL") + .frame(width: 80, alignment: .leading) + .foregroundColor(.secondary) + TextField(Preference.default.uri, text: $model.uri) + }.padding(.vertical, 4) + HStack { + Text("Name") + .frame(width: 80, alignment: .leading) + .foregroundColor(.secondary) + TextField(Preference.default.streamName, text: $model.streamName) + }.padding(.vertical, 4) + } header: { + HStack { + Text("Stream") + Spacer() + Button(action: { showingInfo = true }) { + Image(systemName: "info.circle") + .font(.system(size: 22)) + .foregroundColor(.blue) + } + } + } + Section { + Picker("Format", selection: $model.audioFormat) { + ForEach(AudioCodecSettings.Format.allCases, id: \.self) { format in + 
Text(String(describing: format)).tag(format) + } + } + } header: { + Text("Audio Codec Settings") + } footer: { + Text("AAC is widely supported. Opus offers better quality at low bitrates.") + } + Section { + Toggle(isOn: $model.isHDREnabled) { + Text("HDR Video") + } + Toggle(isOn: $model.isLowLatencyRateControlEnabled) { + Text("Low Latency Mode") + } + Picker("BitRate Mode", selection: $model.bitRateMode) { + ForEach(model.bitRateModes, id: \.description) { index in + Text(index.description).tag(index) + } + } + } header: { + Text("Video Codec Settings") + } footer: { + Text("HDR captures wider color range. Low latency reduces delay but may affect quality. Average bitrate is recommended for most streams.") + } + Section { + Picker("Preview Type", selection: $model.viewType) { + ForEach(ViewType.allCases, id: \.self) { type in + Text(type.displayName).tag(type) + } + } + Picker("Audio Capture", selection: $model.audioCaptureMode) { + ForEach(AudioSourceServiceMode.allCases, id: \.self) { view in + Text(String(describing: view)).tag(view) + } + } + Toggle(isOn: $model.isGPURendererEnabled) { + Text("GPU Rendering") + } + } header: { + Text("Capture Settings") + } footer: { + Text("Metal preview is faster. 
AudioEngine mode is recommended for stability.") + } + Section { + Button(action: { + model.showPublishSheet.toggle() + }, label: { + Text("Memory release test for PublishView") + }).sheet(isPresented: $model.showPublishSheet, content: { + PublishView() + }) + } header: { + Text("Debug") + } + } + #if os(iOS) + .sheet(isPresented: $showingInfo) { + InfoGuideView(showingInfo: $showingInfo) + } + #endif + } +} + +#Preview { + PreferenceView() +} diff --git a/Vendor/HaishinKit.swift/Examples/iOS/PreferenceViewModel.swift b/Vendor/HaishinKit.swift/Examples/iOS/PreferenceViewModel.swift new file mode 100644 index 000000000..3356ddd1c --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/iOS/PreferenceViewModel.swift @@ -0,0 +1,173 @@ +import Combine +import HaishinKit +import SwiftUI + +enum ViewType: String, CaseIterable, Identifiable { + case metal + case pip + + var id: Self { self } + + var displayName: String { + switch self { + case .metal: return "Metal" + case .pip: return "System PiP" + } + } +} + +enum AudioSourceServiceMode: String, CaseIterable, Sendable { + case audioSource + case audioSourceWithStereo + case audioEngine +} + +@MainActor +final class PreferenceViewModel: ObservableObject { + private enum Keys { + static let uri = "pref_stream_uri" + static let streamName = "pref_stream_name" + static let audioFormat = "pref_audio_format" + static let bitRateMode = "pref_bitrate_mode" + static let isLowLatencyEnabled = "pref_low_latency" + static let viewType = "pref_view_type" + static let isGPURendererEnabled = "pref_gpu_renderer" + static let audioCaptureMode = "pref_audio_capture_mode" + static let isDualCameraEnabled = "pref_dual_camera" + static let isHDREnabled = "pref_hdr_enabled" + } + + @Published var showPublishSheet: Bool = false + + @Published var uri: String { + didSet { + UserDefaults.standard.set(uri, forKey: Keys.uri) + } + } + @Published var streamName: String { + didSet { + UserDefaults.standard.set(streamName, forKey: Keys.streamName) + } 
+ } + + private(set) var bitRateModes: [VideoCodecSettings.BitRateMode] = [.average] + + // MARK: - AudioCodecSettings. + @Published var audioFormat: AudioCodecSettings.Format = .aac { + didSet { + UserDefaults.standard.set(audioFormat.rawValue, forKey: Keys.audioFormat) + } + } + + // MARK: - VideoCodecSettings. + @Published var bitRateMode: VideoCodecSettings.BitRateMode = .average { + didSet { + UserDefaults.standard.set(bitRateMode.description, forKey: Keys.bitRateMode) + } + } + @Published var isLowLatencyRateControlEnabled: Bool = false { + didSet { + UserDefaults.standard.set(isLowLatencyRateControlEnabled, forKey: Keys.isLowLatencyEnabled) + } + } + + // MARK: - Others + @Published var viewType: ViewType = .metal { + didSet { + UserDefaults.standard.set(viewType.rawValue, forKey: Keys.viewType) + } + } + @Published var isGPURendererEnabled: Bool = true { + didSet { + UserDefaults.standard.set(isGPURendererEnabled, forKey: Keys.isGPURendererEnabled) + } + } + @Published var audioCaptureMode: AudioSourceServiceMode = .audioEngine { + didSet { + UserDefaults.standard.set(audioCaptureMode.rawValue, forKey: Keys.audioCaptureMode) + } + } + @Published var isDualCameraEnabled: Bool = true { + didSet { + UserDefaults.standard.set(isDualCameraEnabled, forKey: Keys.isDualCameraEnabled) + } + } + @Published var isHDREnabled: Bool = false { + didSet { + UserDefaults.standard.set(isHDREnabled, forKey: Keys.isHDREnabled) + } + } + + init() { + let defaults = UserDefaults.standard + + self.uri = defaults.string(forKey: Keys.uri) ?? Preference.default.uri + self.streamName = defaults.string(forKey: Keys.streamName) ?? 
Preference.default.streamName + + if let rawValue = defaults.string(forKey: Keys.audioFormat), + let format = AudioCodecSettings.Format(rawValue: rawValue) { + self.audioFormat = format + } + + if let savedMode = defaults.string(forKey: Keys.bitRateMode) { + if savedMode == VideoCodecSettings.BitRateMode.average.description { + self.bitRateMode = .average + } else if #available(iOS 16.0, tvOS 16.0, *), savedMode == VideoCodecSettings.BitRateMode.constant.description { + self.bitRateMode = .constant + } + } + + if defaults.object(forKey: Keys.isLowLatencyEnabled) != nil { + self.isLowLatencyRateControlEnabled = defaults.bool(forKey: Keys.isLowLatencyEnabled) + } + + if let rawValue = defaults.string(forKey: Keys.viewType), + let type = ViewType(rawValue: rawValue) { + self.viewType = type + } + + if defaults.object(forKey: Keys.isGPURendererEnabled) != nil { + self.isGPURendererEnabled = defaults.bool(forKey: Keys.isGPURendererEnabled) + } + + if let rawValue = defaults.string(forKey: Keys.audioCaptureMode), + let mode = AudioSourceServiceMode(rawValue: rawValue) { + self.audioCaptureMode = mode + } + + if defaults.object(forKey: Keys.isDualCameraEnabled) != nil { + self.isDualCameraEnabled = defaults.bool(forKey: Keys.isDualCameraEnabled) + } + + if defaults.object(forKey: Keys.isHDREnabled) != nil { + self.isHDREnabled = defaults.bool(forKey: Keys.isHDREnabled) + } + + if #available(iOS 16.0, tvOS 16.0, *) { + bitRateModes.append(.constant) + } + if #available(iOS 26.0, tvOS 26.0, macOS 26.0, *) { + bitRateModes.append(.variable) + } + } + + func makeVideoCodecSettings(_ settings: VideoCodecSettings) -> VideoCodecSettings { + var newSettings = settings + newSettings.bitRateMode = bitRateMode + newSettings.isLowLatencyRateControlEnabled = isLowLatencyRateControlEnabled + return newSettings + } + + func makeAudioCodecSettings(_ settings: AudioCodecSettings) -> AudioCodecSettings { + var newSettings = settings + newSettings.format = audioFormat + return newSettings + 
} + + func makeURL() -> URL? { + if uri.contains("rtmp://") { + return URL(string: uri + "/" + streamName) + } + return URL(string: uri) + } +} diff --git a/Vendor/HaishinKit.swift/Examples/iOS/PublishView.swift b/Vendor/HaishinKit.swift/Examples/iOS/PublishView.swift new file mode 100644 index 000000000..3d6c30b8b --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/iOS/PublishView.swift @@ -0,0 +1,619 @@ +import AVFoundation +import Charts +import HaishinKit +import SwiftUI + +enum FPS: String, CaseIterable, Identifiable { + case fps15 = "15" + case fps30 = "30" + case fps60 = "60" + + var frameRate: Float64 { + switch self { + case .fps15: + return 15 + case .fps30: + return 30 + case .fps60: + return 60 + } + } + + var id: Self { self } +} + +private func bitrateQuality(_ kbps: Double) -> String { + switch kbps { + case ..<1000: return "Low" + case 1000..<1500: return "SD" + case 1500..<2500: return "HD" + case 2500..<3500: return "High" + default: return "Ultra" + } +} + +enum VideoEffectItem: String, CaseIterable, Identifiable, Sendable { + case none + case monochrome + case warm + case vivid + + var id: Self { self } + + var displayName: String { + switch self { + case .none: return "Normal" + case .monochrome: return "B&W" + case .warm: return "Warm" + case .vivid: return "Vivid" + } + } + + func makeVideoEffect() -> VideoEffect? { + switch self { + case .none: + return nil + case .monochrome: + return MonochromeEffect() + case .warm: + return WarmEffect() + case .vivid: + return VividEffect() + } + } +} + +struct StreamButton: View { + let readyState: SessionReadyState + let onStart: () -> Void + let onStop: () -> Void + + @State private var isPulsing = false + @State private var countdown = 3 + @State private var countdownTimer: Timer? 
+ + var body: some View { + Button(action: { + switch readyState { + case .closed: + onStart() + case .open: + onStop() + default: + break + } + }) { + ZStack { + if readyState == .open { + Circle() + .stroke(Color.red.opacity(0.5), lineWidth: 3) + .frame(width: 76, height: 76) + .scaleEffect(isPulsing ? 1.2 : 1.0) + .opacity(isPulsing ? 0 : 0.8) + .animation( + .easeInOut(duration: 1.0).repeatForever(autoreverses: false), + value: isPulsing + ) + } + + Circle() + .fill(buttonBackground) + .frame(width: 70, height: 70) + .shadow(color: shadowColor, radius: 8, x: 0, y: 4) + + VStack(spacing: 2) { + switch readyState { + case .connecting: + Text("\(countdown)") + .font(.system(size: 28, weight: .bold)) + .foregroundColor(.white) + case .closing: + Text("...") + .font(.system(size: 20, weight: .bold)) + .foregroundColor(.white) + case .open: + Image(systemName: "stop.fill") + .font(.system(size: 18, weight: .bold)) + .foregroundColor(.white) + Text("END") + .font(.system(size: 10, weight: .bold)) + .foregroundColor(.white) + case .closed: + Image(systemName: "dot.radiowaves.left.and.right") + .font(.system(size: 20, weight: .semibold)) + .foregroundColor(.white) + Text("GO LIVE") + .font(.system(size: 9, weight: .bold)) + .foregroundColor(.white) + } + } + } + } + .disabled(readyState == .connecting || readyState == .closing) + .onAppear { + if readyState == .open { + isPulsing = true + } + } + .onChange(of: readyState) { newState in + isPulsing = (newState == .open) + if newState == .connecting { + startCountdown() + } else { + stopCountdown() + } + } + .onDisappear { + stopCountdown() + } + } + + private func startCountdown() { + countdown = 3 + countdownTimer?.invalidate() + countdownTimer = Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true) { _ in + if countdown > 1 { + countdown -= 1 + } + } + } + + private func stopCountdown() { + countdownTimer?.invalidate() + countdownTimer = nil + countdown = 3 + } + + private var buttonBackground: LinearGradient { + 
switch readyState { + case .open: + return LinearGradient( + colors: [Color.red, Color.red.opacity(0.8)], + startPoint: .top, + endPoint: .bottom + ) + case .connecting, .closing: + return LinearGradient( + colors: [Color.orange, Color.orange.opacity(0.8)], + startPoint: .top, + endPoint: .bottom + ) + case .closed: + return LinearGradient( + colors: [Color.green, Color.green.opacity(0.7)], + startPoint: .top, + endPoint: .bottom + ) + } + } + + private var shadowColor: Color { + switch readyState { + case .open: + return Color.red.opacity(0.5) + case .connecting, .closing: + return Color.orange.opacity(0.5) + case .closed: + return Color.green.opacity(0.5) + } + } +} + +private func formatDuration(_ duration: TimeInterval) -> String { + let hours = Int(duration) / 3600 + let minutes = (Int(duration) % 3600) / 60 + let seconds = Int(duration) % 60 + if hours > 0 { + return String(format: "%d:%02d:%02d", hours, minutes, seconds) + } + return String(format: "%d:%02d", minutes, seconds) +} + +private func thermalStateText(_ state: ProcessInfo.ThermalState) -> String { + switch state { + case .nominal: return "Cool" + case .fair: return "Warm" + case .serious: return "Hot" + case .critical: return "Critical" + @unknown default: return "Unknown" + } +} + +private func thermalStateColor(_ state: ProcessInfo.ThermalState) -> Color { + switch state { + case .nominal: return .green + case .fair: return .yellow + case .serious: return .orange + case .critical: return .red + @unknown default: return .white + } +} + +struct StatusBadge: View { + let text: String + let color: Color + var textColor: Color = .white + + var body: some View { + Text(text) + .font(.system(size: 10, weight: .bold)) + .foregroundColor(textColor) + .padding(.horizontal, 6) + .padding(.vertical, 3) + .background(color) + .cornerRadius(4) + } +} + +struct SmallIconButton: View { + let icon: String + let color: Color + let action: () -> Void + + var body: some View { + Button(action: action) { + 
Image(systemName: icon) + .font(.system(size: 20)) + .foregroundColor(color) + .frame(width: 44, height: 44) + .background(Color.black.opacity(0.3)) + .cornerRadius(22) + } + } +} + +struct PublishView: View { + @Environment(\.horizontalSizeClass) private var horizontalSizeClass + @EnvironmentObject var preference: PreferenceViewModel + @StateObject private var model = PublishViewModel() + @State private var showFilterHint = true + @State private var showFilterChange = false + @State private var filterChangeId = 0 + + var body: some View { + ZStack { + VStack { + switch preference.viewType { + case .metal: + MTHKViewRepresentable(previewSource: model, videoGravity: .resizeAspectFill) + case .pip: + PiPHKViewRepresentable(previewSource: model, videoGravity: .resizeAspectFill) + } + } + + if model.isLoading { + Color.black.opacity(0.6) + .ignoresSafeArea() + VStack(spacing: 16) { + ProgressView() + .progressViewStyle(CircularProgressViewStyle(tint: .white)) + .scaleEffect(1.5) + Text("Loading Camera...") + .font(.headline) + .foregroundColor(.white) + } + } + + if showFilterHint && !model.isLoading { + VStack(spacing: 10) { + VStack(spacing: 8) { + HStack(spacing: 16) { + Image(systemName: "chevron.left") + Text(model.visualEffectItem.displayName) + .font(.system(size: 14, weight: .medium)) + Image(systemName: "chevron.right") + } + Text("Swipe to change filter") + .font(.system(size: 11)) + .foregroundColor(.white.opacity(0.7)) + } + .foregroundColor(.white) + .padding(.horizontal, 20) + .padding(.vertical, 12) + .background(Color.black.opacity(0.35)) + .cornerRadius(12) + HStack(spacing: 6) { + ForEach(VideoEffectItem.allCases) { effect in + Circle() + .fill(effect == model.visualEffectItem ? 
Color.white : Color.white.opacity(0.4)) + .frame(width: 6, height: 6) + } + } + } + .transition(.opacity) + .onAppear { + DispatchQueue.main.asyncAfter(deadline: .now() + 3) { + withAnimation(.easeOut(duration: 0.5)) { + showFilterHint = false + } + } + } + } + + if !showFilterHint { + VStack(spacing: 10) { + Text(model.visualEffectItem.displayName) + .font(.system(size: 18, weight: .semibold)) + .foregroundColor(.white) + .padding(.horizontal, 24) + .padding(.vertical, 14) + .background(Color.black.opacity(0.35)) + .cornerRadius(12) + HStack(spacing: 6) { + ForEach(VideoEffectItem.allCases) { effect in + Circle() + .fill(effect == model.visualEffectItem ? Color.white : Color.white.opacity(0.4)) + .frame(width: 6, height: 6) + } + } + } + .opacity(showFilterChange ? 1 : 0) + .animation(.easeOut(duration: 0.3), value: showFilterChange) + } + + VStack(spacing: 0) { + HStack(alignment: .top) { + VStack(alignment: .leading, spacing: 8) { + if model.readyState == .open { + HStack(spacing: 6) { + Circle() + .fill(Color.red) + .frame(width: 10, height: 10) + Text(formatDuration(model.streamDuration)) + .font(.system(size: 16, weight: .bold, design: .monospaced)) + .foregroundColor(.white) + } + .padding(.horizontal, 10) + .padding(.vertical, 6) + .background(Color.black.opacity(0.6)) + .cornerRadius(8) + } + + if !model.isLoading { + Text("720p") + .font(.system(size: 10, weight: .medium)) + .foregroundColor(.white) + .padding(.horizontal, 8) + .padding(.vertical, 4) + .background(Color.black.opacity(0.6)) + .cornerRadius(4) + } + + if !model.audioSources.isEmpty { + Picker("AudioSource", selection: $model.audioSource) { + ForEach(model.audioSources, id: \.description) { source in + Text(source.description).tag(source) + } + } + .frame(width: 180) + .background(Color.black.opacity(0.4)) + .cornerRadius(8) + } + } + + Spacer() + + VStack(alignment: .trailing, spacing: 8) { + HStack(spacing: 6) { + if model.readyState == .open { + StatusBadge(text: "LIVE", color: .red) + } 
+ if model.isRecording { + StatusBadge(text: "REC", color: .orange) + } + if preference.isHDREnabled { + StatusBadge(text: "HDR", color: .purple) + } + if model.isAudioMuted { + StatusBadge(text: "MUTED", color: .gray) + } + if model.isTorchEnabled { + StatusBadge(text: "TORCH", color: .yellow, textColor: .black) + } + if model.visualEffectItem != .none { + StatusBadge(text: model.visualEffectItem.displayName.uppercased(), color: .cyan) + } + } + + if model.isVolumeOn { + Text("Volume up causes echo") + .font(.system(size: 10)) + .foregroundColor(.white) + .padding(.horizontal, 6) + .padding(.vertical, 3) + .background(Color.red.opacity(0.8)) + .cornerRadius(4) + } + } + } + .padding(16) + + Spacer() + + VStack(spacing: 10) { + if model.readyState == .open && !model.stats.isEmpty { + HStack(spacing: 8) { + HStack(spacing: 3) { + Image(systemName: "arrow.up") + .font(.system(size: 9, weight: .bold)) + Text("\(model.currentUploadKBps)") + .font(.system(size: 11, weight: .bold, design: .monospaced)) + Text("KB/s") + .font(.system(size: 8)) + .foregroundColor(.white.opacity(0.6)) + } + + Chart(model.stats) { + LineMark( + x: .value("time", $0.date), + y: .value("bytes", $0.currentBytesOutPerSecond) + ) + .foregroundStyle(Color.cyan) + .lineStyle(StrokeStyle(lineWidth: 1.5)) + } + .chartYAxis(.hidden) + .chartXAxis(.hidden) + .frame(height: 28) + + HStack(spacing: 3) { + Image(systemName: "thermometer.medium") + .font(.system(size: 9)) + Text(thermalStateText(model.thermalState)) + .font(.system(size: 10, weight: .medium)) + .foregroundColor(thermalStateColor(model.thermalState)) + } + } + .foregroundColor(.white) + .padding(.horizontal, 10) + .padding(.vertical, 6) + .background(Color.black.opacity(0.4)) + .cornerRadius(8) + } + + HStack(spacing: 0) { + HStack(spacing: 4) { + ForEach(FPS.allCases) { fps in + Button(action: { + model.currentFPS = fps + model.setFrameRate(fps.frameRate) + }) { + Text(fps.rawValue) + .font(.system(size: 15, weight: .semibold)) + 
.foregroundColor(model.currentFPS == fps ? .white : .white.opacity(0.5)) + .frame(width: 44, height: 44) + .background(model.currentFPS == fps ? Color.white.opacity(0.25) : Color.black.opacity(0.3)) + .cornerRadius(22) + } + } + } + + Spacer() + + HStack(spacing: 6) { + SmallIconButton(icon: model.isRecording ? "record.circle.fill" : "record.circle", + color: model.isRecording ? .orange : .white) { + model.toggleRecording() + } + .disabled(model.readyState != .open) + .opacity(model.readyState == .open ? 1.0 : 0.4) + + SmallIconButton(icon: model.isAudioMuted ? "mic.slash.fill" : "mic.fill", + color: model.isAudioMuted ? .red : .white) { + model.toggleAudioMuted() + } + + SmallIconButton(icon: "arrow.triangle.2.circlepath.camera", + color: .white) { + model.flipCamera() + } + + SmallIconButton(icon: model.isTorchEnabled ? "flashlight.on.fill" : "flashlight.off.fill", + color: model.isTorchEnabled ? .yellow : .white) { + model.toggleTorch() + } + .disabled(model.currentCamera == "Front") + .opacity(model.currentCamera == "Front" ? 0.4 : 1.0) + + SmallIconButton(icon: model.isDualCameraEnabled ? "rectangle.on.rectangle.fill" : "rectangle.on.rectangle", + color: model.isDualCameraEnabled ? 
.cyan : .white) { + model.toggleDualCamera() + } + } + } + + HStack(spacing: 12) { + VStack(alignment: .leading, spacing: 2) { + HStack(spacing: 4) { + Text("\(Int(model.videoBitRates))") + .font(.system(size: 12, weight: .semibold, design: .monospaced)) + .foregroundColor(.white) + Text("kbps") + .font(.system(size: 9)) + .foregroundColor(.white.opacity(0.5)) + Text("•") + .foregroundColor(.white.opacity(0.3)) + Text(bitrateQuality(model.videoBitRates)) + .font(.system(size: 9, weight: .semibold)) + .foregroundColor(.cyan) + } + Slider(value: $model.videoBitRates, in: 500...4000, step: 100) + .tint(.cyan) + } + + StreamButton( + readyState: model.readyState, + onStart: { model.showPreLiveDialog = true }, + onStop: { model.stopPublishing() } + ) + .confirmationDialog("Ready to Go Live?", isPresented: $model.showPreLiveDialog, titleVisibility: .visible) { + Button("Go Live with Recording") { + model.startPublishing(preference, withRecording: true) + } + Button("Go Live without Recording") { + model.startPublishing(preference, withRecording: false) + } + Button("Cancel", role: .cancel) { } + } message: { + Text("Recording saves a copy of your stream to Photos at \(Int(model.videoBitRates)) kbps.") + } + } + } + .padding(.horizontal, 16) + .padding(.bottom, 16) + .background( + LinearGradient( + colors: [.clear, .black.opacity(0.25), .black.opacity(0.5)], + startPoint: .top, + endPoint: .bottom + ) + ) + } + } + .onAppear { + model.startRunning(preference) + } + .onDisappear { + model.stopRunning() + } + .onChange(of: horizontalSizeClass) { _ in + model.orientationDidChange() + }.alert(isPresented: $model.isShowError) { + Alert( + title: Text("Error"), + message: Text(String(describing: model.error)), + dismissButton: .default(Text("OK")) + ) + } + .gesture( + DragGesture(minimumDistance: 50) + .onEnded { value in + if abs(value.translation.width) > abs(value.translation.height) { + let effects = VideoEffectItem.allCases + guard let currentIndex = 
effects.firstIndex(of: model.visualEffectItem) else { return } + let newIndex: Int + if value.translation.width < 0 { + newIndex = (currentIndex + 1) % effects.count + } else { + newIndex = (currentIndex - 1 + effects.count) % effects.count + } + let newEffect = effects[newIndex] + model.visualEffectItem = newEffect + model.setVisualEffet(newEffect) + filterChangeId += 1 + showFilterChange = true + let currentId = filterChangeId + Task { + try? await Task.sleep(for: .milliseconds(800)) + if filterChangeId == currentId { + showFilterChange = false + } + } + } + } + ) + } +} + +#Preview { + PublishView() +} diff --git a/Vendor/HaishinKit.swift/Examples/iOS/PublishViewModel.swift b/Vendor/HaishinKit.swift/Examples/iOS/PublishViewModel.swift new file mode 100644 index 000000000..436df3eb7 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/iOS/PublishViewModel.swift @@ -0,0 +1,644 @@ +import AVFoundation +import AVKit +import HaishinKit +import MediaPlayer +import Photos +import RTCHaishinKit +import SwiftUI + +@MainActor +final class PublishViewModel: ObservableObject { + private enum Keys { + static let currentFPS = "publish_fps" + static let videoBitRates = "publish_bitrate" + } + + @Published var currentFPS: FPS = .fps30 { + didSet { + UserDefaults.standard.set(currentFPS.rawValue, forKey: Keys.currentFPS) + } + } + @Published var visualEffectItem: VideoEffectItem = .none + @Published private(set) var error: Error? 
{ + didSet { + if error != nil { + isShowError = true + } + } + } + @Published var isShowError = false + @Published var showPreLiveDialog = false + @Published private(set) var isAudioMuted = false + @Published private(set) var isTorchEnabled = false + @Published private(set) var readyState: SessionReadyState = .closed + @Published var audioSource: AudioSource = .empty { + didSet { + guard audioSource != oldValue else { + return + } + selectAudioSource(audioSource) + } + } + @Published private(set) var audioSources: [AudioSource] = [] + @Published private(set) var isRecording = false + @Published private(set) var stats: [Stats] = [] + @Published private(set) var currentCamera: String = "Back" + @Published private(set) var isDualCameraEnabled: Bool = false + @Published private(set) var isVolumeOn: Bool = false + @Published private(set) var isLoading: Bool = true + @Published private(set) var videoDimensions: String = "" + @Published private(set) var batteryUsed: Float = 0 + @Published private(set) var streamDuration: TimeInterval = 0 + @Published private(set) var thermalState: ProcessInfo.ThermalState = .nominal + @Published private(set) var currentUploadKBps: Int = 0 + private var streamStartBattery: Float = 0 + private var streamStartTime: Date? + private var batteryTimer: Timer? + private var durationTimer: Timer? + @Published var videoBitRates: Double = 2000 { + didSet { + UserDefaults.standard.set(videoBitRates, forKey: Keys.videoBitRates) + Task { + guard let session else { + return + } + var videoSettings = await session.stream.videoSettings + videoSettings.bitRate = Int(videoBitRates * 1000) + try await session.stream.setVideoSettings(videoSettings) + } + } + } + private(set) var mixer = MediaMixer() + private var tasks: [Task] = [] + private var session: (any Session)? + private var recorder: StreamRecorder? 
+ private var currentPosition: AVCaptureDevice.Position = .back + private var audioSourceService = AudioSourceService() + @ScreenActor private var videoScreenObject: VideoTrackScreenObject? + @ScreenActor private var currentVideoEffect: VideoEffect? + private var volumeObserver: NSKeyValueObservation? + private var mtView: MediaMixerOutput? + private var isMixerReady = false + private var pictureInPictureController: AVPictureInPictureController? + + init() { + let defaults = UserDefaults.standard + + if let rawValue = defaults.string(forKey: Keys.currentFPS), + let fps = FPS(rawValue: rawValue) { + self.currentFPS = fps + } + + if defaults.object(forKey: Keys.videoBitRates) != nil { + self.videoBitRates = defaults.double(forKey: Keys.videoBitRates) + } + + Task { @ScreenActor in + videoScreenObject = VideoTrackScreenObject() + } + } + + func startPublishing(_ preference: PreferenceViewModel, withRecording: Bool = false) { + Task { + guard let session else { + return + } + stats.removeAll() + + let recorder = StreamRecorder() + await mixer.addOutput(recorder) + self.recorder = recorder + + if withRecording { + do { + try await recorder.startRecording() + isRecording = true + } catch { + self.error = error + logger.warn(error) + } + } + + do { + try await session.connect { + Task { @MainActor in + self.isShowError = true + } + } + } catch { + self.error = error + logger.error(error) + } + } + } + + func stopPublishing() { + Task { + if isRecording { + do { + if let videoFile = try await recorder?.stopRecording() { + Task.detached { + try await PHPhotoLibrary.shared().performChanges { + let creationRequest = PHAssetCreationRequest.forAsset() + creationRequest.addResource(with: .video, fileURL: videoFile, options: nil) + } + } + } + } catch { + logger.warn(error) + } + isRecording = false + } + if let recorder { + await mixer.removeOutput(recorder) + self.recorder = nil + } + do { + try await session?.close() + } catch { + logger.error(error) + } + } + } + + func 
toggleRecording() { + if isRecording { + Task { + do { + if let videoFile = try await recorder?.stopRecording() { + Task.detached { + try await PHPhotoLibrary.shared().performChanges { + let creationRequest = PHAssetCreationRequest.forAsset() + creationRequest.addResource(with: .video, fileURL: videoFile, options: nil) + } + } + } + } catch let error as StreamRecorder.Error { + switch error { + case .failedToFinishWriting(let error): + self.error = error + if let error { + logger.warn(error) + } + default: + self.error = error + logger.warn(error) + } + } + isRecording = false + } + } else { + Task { + guard let recorder else { + logger.warn("Recorder not initialized") + return + } + do { + try await recorder.startRecording() + isRecording = true + } catch { + self.error = error + logger.warn(error) + } + for await error in await recorder.error { + switch error { + case .failedToAppend(let error): + self.error = error + default: + self.error = error + } + break + } + } + } + } + + func toggleAudioMuted() { + Task { + if isAudioMuted { + var settings = await mixer.audioMixerSettings + var track = settings.tracks[0] ?? .init() + track.isMuted = false + settings.tracks[0] = track + await mixer.setAudioMixerSettings(settings) + isAudioMuted = false + } else { + var settings = await mixer.audioMixerSettings + var track = settings.tracks[0] ?? .init() + track.isMuted = true + settings.tracks[0] = track + await mixer.setAudioMixerSettings(settings) + isAudioMuted = true + } + } + } + + func makeSession(_ preference: PreferenceViewModel) async { + do { + session = try await SessionBuilderFactory.shared.make(preference.makeURL()) + .setMode(.publish) + .build() + guard let session else { + return + } + var videoSettings = await session.stream.videoSettings + videoSettings.bitRate = Int(videoBitRates * 1000) + try? 
await session.stream.setVideoSettings(videoSettings) + await session.stream.setBitRateStrategy(StatsMonitor({ data in + Task { @MainActor in + self.stats.append(data) + if self.stats.count > 60 { + self.stats.removeFirst(self.stats.count - 60) + } + self.currentUploadKBps = data.currentBytesOutPerSecond / 1024 + } + })) + await mixer.addOutput(session.stream) + tasks.append(Task { + for await readyState in await session.readyState { + self.readyState = readyState + switch readyState { + case .open: + UIApplication.shared.isIdleTimerDisabled = false + self.startBatteryTracking() + case .closed: + UIApplication.shared.isIdleTimerDisabled = true + self.stopBatteryTracking() + default: + UIApplication.shared.isIdleTimerDisabled = true + } + } + }) + } catch { + self.error = error + } + do { + if let session { + try await session.stream.setAudioSettings(preference.makeAudioCodecSettings(session.stream.audioSettings)) + } + } catch { + self.error = error + } + do { + if let session { + try await session.stream.setVideoSettings(preference.makeVideoCodecSettings(session.stream.videoSettings)) + } + } catch { + self.error = error + } + } + + func startRunning(_ preference: PreferenceViewModel) { + isMixerReady = false + isDualCameraEnabled = false + + let isGPURendererEnabled = preference.isGPURendererEnabled + + Task { + tasks.forEach { $0.cancel() } + tasks.removeAll() + + await audioSourceService.stopRunning() + await mixer.stopRunning() + try? await mixer.attachAudio(nil) + try? await mixer.attachVideo(nil, track: 0) + try? await mixer.attachVideo(nil, track: 1) + if let session { + await mixer.removeOutput(session.stream) + try? 
await session.close() + } + session = nil + + mixer = MediaMixer(captureSessionMode: .multi) + + let viewType = preference.viewType + await mixer.configuration { session in + if session.isMultitaskingCameraAccessSupported && viewType == .pip { + session.isMultitaskingCameraAccessEnabled = true + logger.info("session.isMultitaskingCameraAccessEnabled") + } + } + + let audioCaptureMode = preference.audioCaptureMode + await audioSourceService.setUp(preference.audioCaptureMode) + await mixer.configuration { session in + switch audioCaptureMode { + case .audioSource: + session.automaticallyConfiguresApplicationAudioSession = true + case .audioSourceWithStereo: + session.automaticallyConfiguresApplicationAudioSession = false + case .audioEngine: + session.automaticallyConfiguresApplicationAudioSession = true + } + } + await mixer.setMonitoringEnabled(DeviceUtil.isHeadphoneConnected()) + var videoMixerSettings = await mixer.videoMixerSettings + videoMixerSettings.mode = .offscreen + await mixer.setVideoMixerSettings(videoMixerSettings) + + await configureScreen(isGPURendererEnabled: isGPURendererEnabled) + + let backCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) + let frontCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) + try? await mixer.attachVideo(backCamera, track: 0) { videoUnit in + videoUnit.isVideoMirrored = false + } + try? await mixer.attachVideo(frontCamera, track: 1) { videoUnit in + videoUnit.isVideoMirrored = true + } + var videoMixerSettings2 = await mixer.videoMixerSettings + videoMixerSettings2.mainTrack = currentPosition == .front ? 1 : 0 + await mixer.setVideoMixerSettings(videoMixerSettings2) + currentCamera = currentPosition == .front ? "Front" : "Back" + if audioCaptureMode == .audioSource { + try? 
await mixer.attachAudio(AVCaptureDevice.default(for: .audio)) + } + await audioSourceService.startRunning() + await mixer.startRunning() + + isMixerReady = true + if let mtView { + await mixer.addOutput(mtView) + } + + do { + if preference.isHDREnabled { + try await mixer.setDynamicRangeMode(.hdr) + } else { + try await mixer.setDynamicRangeMode(.sdr) + } + } catch { + logger.info(error) + } + await makeSession(preference) + let isLandscape = await UIDevice.current.orientation.isLandscape + await updateVideoEncoderSize(isLandscape: isLandscape) + let screenSize = await mixer.screen.size + if let session = self.session { + let videoSettings = await session.stream.videoSettings + self.videoDimensions = "Screen: \(Int(screenSize.width))x\(Int(screenSize.height)) | Video: \(videoSettings.videoSize.width)x\(videoSettings.videoSize.height)" + } + isLoading = false + } + orientationDidChange() + tasks.append(Task { + for await buffer in await audioSourceService.buffer { + await mixer.append(buffer.0, when: buffer.1) + } + }) + tasks.append(Task { + for await sources in await audioSourceService.sourcesUpdates() { + audioSources = sources + if let first = sources.first, audioSource == .empty { + audioSource = first + } + } + }) + startVolumeMonitoring() + } + + @ScreenActor + private func configureScreen(isGPURendererEnabled: Bool) async { + await mixer.screen.isGPURendererEnabled = isGPURendererEnabled + await mixer.screen.size = .init(width: 720, height: 1280) + await mixer.screen.backgroundColor = UIColor.black.cgColor + } + + private func startVolumeMonitoring() { + let audioSession = AVAudioSession.sharedInstance() + try? 
audioSession.setActive(true) + isVolumeOn = audioSession.outputVolume > 0 + volumeObserver = audioSession.observe(\.outputVolume, options: [.new]) { [weak self] _, change in + Task { @MainActor in + if let volume = change.newValue { + self?.isVolumeOn = volume > 0 + } + } + } + } + + private func stopVolumeMonitoring() { + volumeObserver?.invalidate() + volumeObserver = nil + } + + func stopRunning() { + isMixerReady = false + stopVolumeMonitoring() + Task { + await audioSourceService.stopRunning() + await mixer.stopRunning() + try? await mixer.attachAudio(nil) + try? await mixer.attachVideo(nil, track: 0) + try? await mixer.attachVideo(nil, track: 1) + if let session { + await mixer.removeOutput(session.stream) + } + tasks.forEach { $0.cancel() } + tasks.removeAll() + } + } + + func flipCamera() { + Task { + var videoMixerSettings = await mixer.videoMixerSettings + if videoMixerSettings.mainTrack == 0 { + videoMixerSettings.mainTrack = 1 + await mixer.setVideoMixerSettings(videoMixerSettings) + currentPosition = .front + currentCamera = "Front" + if isTorchEnabled { + await mixer.setTorchEnabled(false) + isTorchEnabled = false + } + Task { @ScreenActor in + videoScreenObject?.track = 0 + } + } else { + videoMixerSettings.mainTrack = 0 + await mixer.setVideoMixerSettings(videoMixerSettings) + currentPosition = .back + currentCamera = "Back" + Task { @ScreenActor in + videoScreenObject?.track = 1 + } + } + } + } + + func setVisualEffet(_ videoEffect: VideoEffectItem) { + Task { @ScreenActor in + if let currentVideoEffect { + _ = await mixer.screen.unregisterVideoEffect(currentVideoEffect) + } + if let videoEffect = videoEffect.makeVideoEffect() { + currentVideoEffect = videoEffect + _ = await mixer.screen.registerVideoEffect(videoEffect) + } + } + } + + func toggleTorch() { + Task { + await mixer.setTorchEnabled(!isTorchEnabled) + isTorchEnabled.toggle() + } + } + + func toggleDualCamera() { + let isEnabled = isDualCameraEnabled + let position = currentPosition + 
Task { @ScreenActor in + if isEnabled { + if let videoScreenObject { + try? await mixer.screen.removeChild(videoScreenObject) + } + await MainActor.run { isDualCameraEnabled = false } + } else { + if let videoScreenObject { + videoScreenObject.size = .init(width: 400, height: 224) + videoScreenObject.cornerRadius = 8.0 + videoScreenObject.track = position == .front ? 0 : 1 + videoScreenObject.verticalAlignment = .top + videoScreenObject.horizontalAlignment = .right + videoScreenObject.layoutMargin = .init(top: 32, left: 0, bottom: 0, right: 32) + videoScreenObject.invalidateLayout() + try? await mixer.screen.addChild(videoScreenObject) + } + await MainActor.run { isDualCameraEnabled = true } + } + } + } + + func setFrameRate(_ fps: Float64) { + Task { + do { + try? await mixer.configuration(video: 0) { video in + do { + try video.setFrameRate(fps) + } catch { + logger.error(error) + } + } + try? await mixer.configuration(video: 1) { video in + do { + try video.setFrameRate(fps) + } catch { + logger.error(error) + } + } + try await mixer.setFrameRate(fps) + if var videoSettings = await session?.stream.videoSettings { + videoSettings.expectedFrameRate = fps + try? 
await session?.stream.setVideoSettings(videoSettings) + } + } catch { + logger.error(error) + } + } + } + + func orientationDidChange() { + Task { @ScreenActor in + await mixer.setVideoOrientation(.portrait) + await mixer.screen.size = .init(width: 720, height: 1280) + let screenSize = await mixer.screen.size + Task { @MainActor in + await self.updateVideoEncoderSize(isLandscape: false) + if let session = self.session { + let videoSettings = await session.stream.videoSettings + self.videoDimensions = "Screen: \(Int(screenSize.width))x\(Int(screenSize.height)) | Video: \(videoSettings.videoSize.width)x\(videoSettings.videoSize.height)" + } else { + self.videoDimensions = "Screen: \(Int(screenSize.width))x\(Int(screenSize.height))" + } + } + } + } + + private func updateVideoEncoderSize(isLandscape: Bool) async { + guard let session else { return } + var videoSettings = await session.stream.videoSettings + let targetSize: CGSize = isLandscape + ? CGSize(width: 1280, height: 720) + : CGSize(width: 720, height: 1280) + if videoSettings.videoSize != targetSize { + videoSettings.videoSize = targetSize + try? 
await session.stream.setVideoSettings(videoSettings) + } + } + + private func startBatteryTracking() { + UIDevice.current.isBatteryMonitoringEnabled = true + streamStartBattery = UIDevice.current.batteryLevel + streamStartTime = Date() + batteryUsed = 0 + streamDuration = 0 + + durationTimer = Timer.scheduledTimer(withTimeInterval: 1, repeats: true) { [weak self] _ in + Task { @MainActor in + self?.updateDuration() + } + } + + batteryTimer = Timer.scheduledTimer(withTimeInterval: 10, repeats: true) { [weak self] _ in + Task { @MainActor in + self?.updateBatteryStats() + } + } + } + + private func stopBatteryTracking() { + durationTimer?.invalidate() + durationTimer = nil + batteryTimer?.invalidate() + batteryTimer = nil + updateBatteryStats() + } + + private func updateDuration() { + guard let startTime = streamStartTime else { return } + streamDuration = Date().timeIntervalSince(startTime) + } + + private func updateBatteryStats() { + let currentBattery = UIDevice.current.batteryLevel + if currentBattery >= 0 && streamStartBattery >= 0 { + batteryUsed = (streamStartBattery - currentBattery) * 100 + } + thermalState = ProcessInfo.processInfo.thermalState + } + + private func selectAudioSource(_ audioSource: AudioSource) { + Task { + try await audioSourceService.selectAudioSource(audioSource) + await mixer.stopCapturing() + try await mixer.attachAudio(AVCaptureDevice.default(for: .audio)) + await mixer.startCapturing() + } + } +} + +extension PublishViewModel: MTHKViewRepresentable.PreviewSource { + nonisolated func connect(to view: MTHKView) { + Task { @MainActor in + self.mtView = view + if isMixerReady { + await mixer.addOutput(view) + } + } + } +} + +extension PublishViewModel: PiPHKViewRepresentable.PreviewSource { + nonisolated func connect(to view: PiPHKView) { + Task { @MainActor in + self.mtView = view + if isMixerReady { + await mixer.addOutput(view) + } + if pictureInPictureController == nil { + pictureInPictureController = 
AVPictureInPictureController(contentSource: .init(sampleBufferDisplayLayer: view.layer, playbackDelegate: PlaybackDelegate())) + } + } + } +} diff --git a/Vendor/HaishinKit.swift/Examples/iOS/Screencast/Info.plist b/Vendor/HaishinKit.swift/Examples/iOS/Screencast/Info.plist new file mode 100644 index 000000000..e9367904d --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/iOS/Screencast/Info.plist @@ -0,0 +1,15 @@ + + + + + NSExtension + + NSExtensionPointIdentifier + com.apple.broadcast-services-upload + NSExtensionPrincipalClass + $(PRODUCT_MODULE_NAME).SampleHandler + RPBroadcastProcessMode + RPBroadcastProcessModeSampleBuffer + + + diff --git a/Vendor/HaishinKit.swift/Examples/iOS/Screencast/SampleHandler.swift b/Vendor/HaishinKit.swift/Examples/iOS/Screencast/SampleHandler.swift new file mode 100644 index 000000000..2d02ab091 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/iOS/Screencast/SampleHandler.swift @@ -0,0 +1,100 @@ +import HaishinKit +@preconcurrency import Logboard +import MediaPlayer +import ReplayKit +import RTCHaishinKit +import RTMPHaishinKit +import SRTHaishinKit +import VideoToolbox + +nonisolated let logger = LBLogger.with("com.haishinkit.Screencast") + +final class SampleHandler: RPBroadcastSampleHandler, @unchecked Sendable { + private var slider: UISlider? + private var session: Session? + private var mixer = MediaMixer(captureSessionMode: .manual, multiTrackAudioMixingEnabled: true) + private var needVideoConfiguration = true + + override init() { + Task { + await SessionBuilderFactory.shared.register(RTMPSessionFactory()) + await SessionBuilderFactory.shared.register(SRTSessionFactory()) + await SessionBuilderFactory.shared.register(HTTPSessionFactory()) + + await SRTLogger.shared.setLevel(.debug) + await RTCLogger.shared.setLevel(.info) + } + } + + override func broadcastStarted(withSetupInfo setupInfo: [String: NSObject]?) 
{ + LBLogger.with(kHaishinKitIdentifier).level = .trace + LBLogger.with(kRTMPHaishinKitIdentifier).level = .trace + LBLogger.with(kSRTHaishinKitIdentifier).level = .trace + LBLogger.with(kRTCHaishinKitIdentifier).level = .trace + // mixer.audioMixerSettings.tracks[1] = .default + Task { + do { + session = try await SessionBuilderFactory.shared.make(Preference.default.makeURL()).build() + // ReplayKit is sensitive to memory, so we limit the queue to a maximum of five items. + var videoSetting = await mixer.videoMixerSettings + videoSetting.mode = .passthrough + await session?.stream.setVideoInputBufferCounts(5) + await mixer.setVideoMixerSettings(videoSetting) + await mixer.startRunning() + if let session { + await mixer.addOutput(session.stream) + try? await session.connect { + } + } + } catch { + logger.error(error) + } + } + // The volume of the audioApp can be obtained even when muted. A hack to synchronize with the volume. + DispatchQueue.main.async { + let volumeView = MPVolumeView(frame: CGRect.zero) + if let slider = volumeView.subviews.compactMap({ $0 as? UISlider }).first { + self.slider = slider + } + } + } + + override func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, with sampleBufferType: RPSampleBufferType) { + switch sampleBufferType { + case .video: + Task { + if needVideoConfiguration, let dimensions = sampleBuffer.formatDescription?.dimensions { + var videoSettings = await session?.stream.videoSettings + videoSettings?.videoSize = .init( + width: CGFloat(dimensions.width), + height: CGFloat(dimensions.height) + ) + videoSettings?.profileLevel = kVTProfileLevel_H264_Baseline_AutoLevel as String + if let videoSettings { + try? 
await session?.stream.setVideoSettings(videoSettings) + } + needVideoConfiguration = false + } + } + Task { await mixer.append(sampleBuffer) } + case .audioMic: + if sampleBuffer.dataReadiness == .ready { + Task { await mixer.append(sampleBuffer, track: 0) } + } + case .audioApp: + Task { @MainActor in + if let volume = slider?.value { + var audioMixerSettings = await mixer.audioMixerSettings + audioMixerSettings.tracks[1] = .default + audioMixerSettings.tracks[1]?.volume = volume * 0.5 + await mixer.setAudioMixerSettings(audioMixerSettings) + } + } + if sampleBuffer.dataReadiness == .ready { + Task { await mixer.append(sampleBuffer, track: 1) } + } + @unknown default: + break + } + } +} diff --git a/Vendor/HaishinKit.swift/Examples/iOS/StatsMonitor.swift b/Vendor/HaishinKit.swift/Examples/iOS/StatsMonitor.swift new file mode 100644 index 000000000..91bd82785 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/iOS/StatsMonitor.swift @@ -0,0 +1,36 @@ +import Foundation +import HaishinKit + +struct Stats: Identifiable { + let date: Date + let currentBytesOutPerSecond: Int + let id: Int + + init(report: NetworkMonitorReport) { + currentBytesOutPerSecond = report.currentBytesOutPerSecond + date = Date() + id = Int(date.timeIntervalSince1970) + } +} + +struct StatsMonitor: StreamBitRateStrategy { + let mamimumVideoBitRate: Int = 0 + let mamimumAudioBitRate: Int = 0 + + private let callback: @Sendable (Stats) -> Void + + init(_ callback: @Sendable @escaping (Stats) -> Void) { + self.callback = callback + } + + func adjustBitrate(_ event: NetworkMonitorEvent, stream: some StreamConvertible) async { + switch event { + case .status(let report): + callback(Stats(report: report)) + case .publishInsufficientBWOccured(let report): + callback(Stats(report: report)) + case .reset: + break + } + } +} diff --git a/Vendor/HaishinKit.swift/Examples/iOS/UVCView.swift b/Vendor/HaishinKit.swift/Examples/iOS/UVCView.swift new file mode 100644 index 000000000..c0e992e03 --- /dev/null +++ 
b/Vendor/HaishinKit.swift/Examples/iOS/UVCView.swift @@ -0,0 +1,100 @@ +import AVFoundation +import Charts +import HaishinKit +import SwiftUI + +@available(iOS 17.0, *) +struct UVCView: View { + @Environment(\.horizontalSizeClass) private var horizontalSizeClass + @EnvironmentObject var preference: PreferenceViewModel + @StateObject private var model = UVCViewModel() + + var body: some View { + ZStack { + VStack { + if preference.viewType == .pip { + PiPHKViewRepresentable(previewSource: model) + } else { + MTHKViewRepresentable(previewSource: model) + } + } + VStack { + HStack(spacing: 16) { + Spacer() + Button(action: { + model.toggleRecording() + }, label: { + Image(systemName: model.isRecording ? + "recordingtape.circle.fill" : + "recordingtape.circle") + .resizable() + .scaledToFit() + .foregroundColor(.white) + .frame(width: 30, height: 30) + }) + } + .frame(height: 50) + HStack { + Spacer() + Toggle(isOn: $model.isHDREnabled) { + Text("HDR") + }.frame(width: 120) + .pickerStyle(.segmented) + .frame(width: 150) + }.frame(height: 80) + Spacer() + HStack { + Spacer() + switch model.readyState { + case .connecting: + Spacer() + case .open: + Button(action: { + model.stopPublishing() + }, label: { + Image(systemName: "stop.circle") + .foregroundColor(.white) + .font(.system(size: 24)) + }) + .frame(width: 60, height: 60) + .background(Color.blue) + .cornerRadius(30.0) + .padding(EdgeInsets(top: 0, leading: 0, bottom: 16.0, trailing: 16.0)) + case .closing: + Spacer() + case .closed: + Button(action: { + model.startPublishing(preference) + }, label: { + Image(systemName: "record.circle") + .foregroundColor(.white) + .font(.system(size: 24)) + }) + .frame(width: 60, height: 60) + .background(Color.blue) + .cornerRadius(30.0) + .padding(EdgeInsets(top: 0, leading: 0, bottom: 0, trailing: 16.0)) + } + }.frame(maxWidth: .infinity) + } + } + .onAppear { + model.startRunning(preference) + } + .onDisappear { + model.stopRunning() + }.alert(isPresented: 
$model.isShowError) { + Alert( + title: Text("Error"), + message: Text(String(describing: model.error)), + dismissButton: .default(Text("OK")) + ) + } + } +} + +#Preview { + if #available(iOS 17.0, *) { + UVCView() + } +} diff --git a/Vendor/HaishinKit.swift/Examples/iOS/UVCViewModel.swift b/Vendor/HaishinKit.swift/Examples/iOS/UVCViewModel.swift new file mode 100644 index 000000000..b88d635ef --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/iOS/UVCViewModel.swift @@ -0,0 +1,250 @@ +import AVFoundation +import HaishinKit +import Photos +import RTCHaishinKit +import SwiftUI + +@available(iOS 17.0, *) +@MainActor +final class UVCViewModel: ObservableObject { + @Published private(set) var error: Error? { + didSet { + if error != nil { + isShowError = true + } + } + } + @Published var isShowError = false + @Published private(set) var readyState: SessionReadyState = .closed + @Published private(set) var isRecording = false + @Published var isHDREnabled = false { + didSet { + Task { + do { + if isHDREnabled { + try await mixer.setDynamicRangeMode(.hdr) + } else { + try await mixer.setDynamicRangeMode(.sdr) + } + } catch { + logger.info(error) + } + } + } + } + // If you want to use the multi-camera feature, please create a MediaMixer with a capture mode. + // let mixer = MediaMixer(captureSessionMode: .multi) + private(set) var mixer = MediaMixer(captureSessionMode: .single) + private var tasks: [Task] = [] + private var session: (any Session)? + private var recorder: StreamRecorder? + + init() { + NotificationCenter.default.addObserver( + forName: .AVCaptureDeviceWasConnected, + object: nil, + queue: .main + ) { notif in + guard let device = notif.object as? 
AVCaptureDevice else { return } + logger.info(device) + self.deviceConnected() + } + } + + func startPublishing(_ preference: PreferenceViewModel) { + Task { + guard let session else { + return + } + do { + try await session.connect { + Task { @MainActor in + self.isShowError = true + } + } + } catch { + self.error = error + logger.error(error) + } + } + } + + func stopPublishing() { + Task { + do { + try await session?.close() + } catch { + logger.error(error) + } + } + } + + func toggleRecording() { + if isRecording { + Task { + do { + // To use this in a product, you need to consider recovery procedures in case moving to the Photo Library fails. + if let videoFile = try await recorder?.stopRecording() { + Task.detached { + try await PHPhotoLibrary.shared().performChanges { + let creationRequest = PHAssetCreationRequest.forAsset() + creationRequest.addResource(with: .video, fileURL: videoFile, options: nil) + } + } + } + } catch let error as StreamRecorder.Error { + switch error { + case .failedToFinishWriting(let error): + self.error = error + if let error { + logger.warn(error) + } + default: + self.error = error + logger.warn(error) + } + } + recorder = nil + isRecording = false + } + } else { + Task { + let recorder = StreamRecorder() + await mixer.addOutput(recorder) + do { + // When starting a recording while connected to Xcode, it freezes for about 30 seconds. iOS26 + Xcode26. + try await recorder.startRecording() + isRecording = true + self.recorder = recorder + } catch { + self.error = error + logger.warn(error) + } + for await error in await recorder.error { + switch error { + case .failedToAppend(let error): + self.error = error + default: + self.error = error + } + break + } + } + } + } + + func makeSession(_ preference: PreferenceViewModel) async { + // Make session. 
+ do { + session = try await SessionBuilderFactory.shared.make(preference.makeURL()) + .setMode(.publish) + .build() + guard let session else { + return + } + await mixer.addOutput(session.stream) + tasks.append(Task { + for await readyState in await session.readyState { + self.readyState = readyState + switch readyState { + case .open: + UIApplication.shared.isIdleTimerDisabled = false + default: + UIApplication.shared.isIdleTimerDisabled = true + } + } + }) + } catch { + self.error = error + } + do { + if let session { + try await session.stream.setAudioSettings(preference.makeAudioCodecSettings(session.stream.audioSettings)) + } + } catch { + self.error = error + } + do { + if let session { + try await session.stream.setVideoSettings(preference.makeVideoCodecSettings(session.stream.videoSettings)) + } + } catch { + self.error = error + } + } + + func startRunning(_ preference: PreferenceViewModel) { + let session = AVAudioSession.sharedInstance() + do { + try session.setCategory(.playAndRecord, mode: .videoRecording, options: [.defaultToSpeaker, .allowBluetoothHFP]) + try session.setActive(true) + } catch { + logger.error(error) + } + + Task { + var videoMixerSettings = await mixer.videoMixerSettings + videoMixerSettings.mode = .passthrough + await mixer.setVideoMixerSettings(videoMixerSettings) + // Attach devices + do { + try await mixer.attachVideo(AVCaptureDevice.default(.external, for: .video, position: .unspecified)) + } catch { + logger.error(error) + } + try? 
await mixer.attachAudio(AVCaptureDevice.default(for: .audio)) + await mixer.startRunning() + await makeSession(preference) + } + Task { @ScreenActor in + if await preference.isGPURendererEnabled { + await mixer.screen.isGPURendererEnabled = true + } else { + await mixer.screen.isGPURendererEnabled = false + } + await mixer.screen.size = .init(width: 720, height: 1280) + await mixer.screen.backgroundColor = UIColor.black.cgColor + } + } + + func stopRunning() { + Task { + await mixer.stopRunning() + try? await mixer.attachAudio(nil) + try? await mixer.attachVideo(nil) + if let session { + await mixer.removeOutput(session.stream) + } + tasks.forEach { $0.cancel() } + tasks.removeAll() + } + } + + private func deviceConnected() { + Task { + do { + try await mixer.attachVideo(AVCaptureDevice.default(.external, for: .video, position: .unspecified)) + } catch { + logger.error(error) + } + } + } +} + +@available(iOS 17.0, *) +extension UVCViewModel: MTHKViewRepresentable.PreviewSource { + nonisolated func connect(to view: MTHKView) { + Task { + await mixer.addOutput(view) + } + } +} + +@available(iOS 17.0, *) +extension UVCViewModel: PiPHKViewRepresentable.PreviewSource { + nonisolated func connect(to view: PiPHKView) { + Task { + await mixer.addOutput(view) + } + } +} diff --git a/Vendor/HaishinKit.swift/Examples/iOS/VisualEffect.swift b/Vendor/HaishinKit.swift/Examples/iOS/VisualEffect.swift new file mode 100644 index 000000000..e20567f67 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/iOS/VisualEffect.swift @@ -0,0 +1,51 @@ +import AVFoundation +import CoreImage +import HaishinKit + +final class MonochromeEffect: VideoEffect { + let filter: CIFilter? 
= CIFilter(name: "CIColorMonochrome") + + func execute(_ image: CIImage) -> CIImage { + guard let filter else { + return image + } + filter.setValue(image, forKey: "inputImage") + filter.setValue(CIColor(red: 0.75, green: 0.75, blue: 0.75), forKey: "inputColor") + filter.setValue(1.0, forKey: "inputIntensity") + return filter.outputImage ?? image + } +} + +final class VividEffect: VideoEffect { + let filter: CIFilter? = CIFilter(name: "CIColorControls") + + func execute(_ image: CIImage) -> CIImage { + guard let filter else { + return image + } + filter.setValue(image, forKey: "inputImage") + filter.setValue(1.5, forKey: "inputSaturation") + filter.setValue(1.15, forKey: "inputContrast") + return filter.outputImage ?? image + } +} + +final class WarmEffect: VideoEffect { + let filter: CIFilter? = CIFilter(name: "CITemperatureAndTint") + let controls: CIFilter? = CIFilter(name: "CIColorControls") + + func execute(_ image: CIImage) -> CIImage { + guard let filter, let controls else { + return image + } + filter.setValue(image, forKey: "inputImage") + filter.setValue(CIVector(x: 6500, y: 0), forKey: "inputNeutral") + filter.setValue(CIVector(x: 4000, y: 0), forKey: "inputTargetNeutral") + guard let warmed = filter.outputImage else { return image } + + controls.setValue(warmed, forKey: "inputImage") + controls.setValue(1.1, forKey: "inputSaturation") + controls.setValue(1.05, forKey: "inputContrast") + return controls.outputImage ?? 
image + } +} diff --git a/Vendor/HaishinKit.swift/Examples/macOS/Assets.xcassets/AccentColor.colorset/Contents.json b/Vendor/HaishinKit.swift/Examples/macOS/Assets.xcassets/AccentColor.colorset/Contents.json new file mode 100644 index 000000000..eb8789700 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/macOS/Assets.xcassets/AccentColor.colorset/Contents.json @@ -0,0 +1,11 @@ +{ + "colors" : [ + { + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Vendor/HaishinKit.swift/Examples/macOS/Assets.xcassets/AppIcon.appiconset/AppIcon.png b/Vendor/HaishinKit.swift/Examples/macOS/Assets.xcassets/AppIcon.appiconset/AppIcon.png new file mode 100644 index 000000000..62e3de039 Binary files /dev/null and b/Vendor/HaishinKit.swift/Examples/macOS/Assets.xcassets/AppIcon.appiconset/AppIcon.png differ diff --git a/Vendor/HaishinKit.swift/Examples/macOS/Assets.xcassets/AppIcon.appiconset/Contents.json b/Vendor/HaishinKit.swift/Examples/macOS/Assets.xcassets/AppIcon.appiconset/Contents.json new file mode 100644 index 000000000..bc26afc71 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/macOS/Assets.xcassets/AppIcon.appiconset/Contents.json @@ -0,0 +1,59 @@ +{ + "images" : [ + { + "idiom" : "mac", + "scale" : "1x", + "size" : "16x16" + }, + { + "idiom" : "mac", + "scale" : "2x", + "size" : "16x16" + }, + { + "idiom" : "mac", + "scale" : "1x", + "size" : "32x32" + }, + { + "idiom" : "mac", + "scale" : "2x", + "size" : "32x32" + }, + { + "idiom" : "mac", + "scale" : "1x", + "size" : "128x128" + }, + { + "idiom" : "mac", + "scale" : "2x", + "size" : "128x128" + }, + { + "idiom" : "mac", + "scale" : "1x", + "size" : "256x256" + }, + { + "idiom" : "mac", + "scale" : "2x", + "size" : "256x256" + }, + { + "idiom" : "mac", + "scale" : "1x", + "size" : "512x512" + }, + { + "filename" : "AppIcon.png", + "idiom" : "mac", + "scale" : "2x", + "size" : "512x512" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git 
a/Vendor/HaishinKit.swift/Examples/macOS/Assets.xcassets/Contents.json b/Vendor/HaishinKit.swift/Examples/macOS/Assets.xcassets/Contents.json new file mode 100644 index 000000000..73c00596a --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/macOS/Assets.xcassets/Contents.json @@ -0,0 +1,6 @@ +{ + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Vendor/HaishinKit.swift/Examples/macOS/ContentView.swift b/Vendor/HaishinKit.swift/Examples/macOS/ContentView.swift new file mode 100644 index 000000000..70479972f --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/macOS/ContentView.swift @@ -0,0 +1,54 @@ +import SwiftUI + +struct ContentView: View { + enum Tab: String, CaseIterable, Identifiable { + case playback + case publish + case preference + + var id: String { rawValue } + } + + @State private var selection: Tab = .playback + + var body: some View { + NavigationSplitView { + List(Tab.allCases, selection: $selection) { tab in + Label(tabTitle(tab), systemImage: tabIcon(tab)).onTapGesture { + selection = tab + } + } + } detail: { + switch selection { + case .playback: + PlaybackView() + case .publish: + PublishView() + case .preference: + PreferenceView() + } + } + } + + private func tabTitle(_ tab: Tab) -> String { + switch tab { + case .playback: + return "Playback" + case .publish: + return "Publish" + case .preference: + return "Preference" + } + } + + private func tabIcon(_ tab: Tab) -> String { + switch tab { + case .playback: + return "play.circle" + case .publish: + return "record.circle" + case .preference: + return "info.circle" + } + } +} diff --git a/Vendor/HaishinKit.swift/Examples/macOS/HaishinApp.entitlements b/Vendor/HaishinKit.swift/Examples/macOS/HaishinApp.entitlements new file mode 100644 index 000000000..6133db3ff --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/macOS/HaishinApp.entitlements @@ -0,0 +1,18 @@ + + + + + com.apple.security.app-sandbox + + com.apple.security.device.audio-input + + 
com.apple.security.device.camera + + com.apple.security.files.user-selected.read-only + + com.apple.security.network.client + + com.apple.security.network.server + + + diff --git a/Vendor/HaishinKit.swift/Examples/macOS/HaishinApp.swift b/Vendor/HaishinKit.swift/Examples/macOS/HaishinApp.swift new file mode 100644 index 000000000..7c833042e --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/macOS/HaishinApp.swift @@ -0,0 +1,34 @@ +import HaishinKit +@preconcurrency import Logboard +import RTCHaishinKit +import RTMPHaishinKit +import SRTHaishinKit +import SwiftUI + +let logger = LBLogger.with("com.haishinkit.HaishinKit.HaishinApp") + +@main +struct HaishinApp: App { + @State private var preference = PreferenceViewModel() + + var body: some Scene { + WindowGroup { + ContentView().environmentObject(preference) + } + } + + init() { + Task { + await SessionBuilderFactory.shared.register(RTMPSessionFactory()) + await SessionBuilderFactory.shared.register(SRTSessionFactory()) + await SessionBuilderFactory.shared.register(HTTPSessionFactory()) + + await RTCLogger.shared.setLevel(.debug) + await SRTLogger.shared.setLevel(.debug) + } + LBLogger(kHaishinKitIdentifier).level = .debug + LBLogger(kSRTHaishinKitIdentifier).level = .debug + LBLogger(kRTCHaishinKitIdentifier).level = .debug + LBLogger(kRTMPHaishinKitIdentifier).level = .debug + } +} diff --git a/Vendor/HaishinKit.swift/Examples/macOS/PlaybackView.swift b/Vendor/HaishinKit.swift/Examples/macOS/PlaybackView.swift new file mode 100644 index 000000000..dd201f9f2 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/macOS/PlaybackView.swift @@ -0,0 +1,57 @@ +import HaishinKit +import SwiftUI + +struct PlaybackView: View { + @StateObject private var model = PlaybackViewModel() + @EnvironmentObject var preference: PreferenceViewModel + + var body: some View { + ZStack { + VStack { + PiPHKViewRepresentable(previewSource: model) + } + if model.readyState == .connecting { + VStack { + ProgressView() + } + } + 
}.alert(isPresented: $model.isShowError) { + Alert( + title: Text("Error"), + message: Text(model.error?.localizedDescription ?? ""), + dismissButton: .default(Text("OK")) + ) + } + .navigationTitle("Playback") + .toolbar { + switch model.readyState { + case .connecting: + ToolbarItem(placement: .primaryAction) { + } + case .open: + ToolbarItem(placement: .primaryAction) { + Button(action: { + model.stop() + }) { + Image(systemName: "stop.circle") + } + } + case .closed: + ToolbarItem(placement: .primaryAction) { + Button(action: { + model.start(preference) + }) { + Image(systemName: "play.circle") + } + } + case .closing: + ToolbarItem(placement: .primaryAction) { + } + } + } + } +} + +#Preview { + PlaybackView() +} diff --git a/Vendor/HaishinKit.swift/Examples/macOS/PlaybackViewModel.swift b/Vendor/HaishinKit.swift/Examples/macOS/PlaybackViewModel.swift new file mode 100644 index 000000000..6dfdc8181 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/macOS/PlaybackViewModel.swift @@ -0,0 +1,74 @@ +@preconcurrency import AVKit +import HaishinKit +import SwiftUI + +@MainActor +final class PlaybackViewModel: ObservableObject { + @Published private(set) var readyState: SessionReadyState = .closed + @Published private(set) var error: Error? + @Published var isShowError = false + + private var view: PiPHKView? + private var session: (any Session)? + private let audioPlayer = AudioPlayer(audioEngine: AVAudioEngine()) + private var pictureInPictureController: AVPictureInPictureController? 
+ + func start(_ preference: PreferenceViewModel) { + Task { + if session == nil { + await makeSession(preference) + } + do { + try await session?.connect { + Task { @MainActor in + self.isShowError = true + } + } + } catch { + self.error = error + self.isShowError = true + } + } + } + + func stop() { + Task { + do { + try await session?.close() + } catch { + logger.error(error) + } + } + } + + private func makeSession(_ preference: PreferenceViewModel) async { + do { + session = try await SessionBuilderFactory.shared.make(preference.makeURL()) + .setMode(.playback) + .build() + guard let session else { + return + } + if let view { + await session.stream.addOutput(view) + } + await session.stream.attachAudioPlayer(audioPlayer) + Task { + for await readyState in await session.readyState { + self.readyState = readyState + } + } + } catch { + logger.error(error) + } + } +} + +extension PlaybackViewModel: PiPHKViewRepresentable.PreviewSource { + // MARK: PiPHKSwiftUiView.PreviewSource + nonisolated func connect(to view: PiPHKView) { + Task { @MainActor in + self.view = view + } + } +} diff --git a/Vendor/HaishinKit.swift/Examples/macOS/PublishView.swift b/Vendor/HaishinKit.swift/Examples/macOS/PublishView.swift new file mode 100644 index 000000000..502633508 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/macOS/PublishView.swift @@ -0,0 +1,110 @@ +import AVFoundation +import HaishinKit +import SwiftUI + +enum FPS: String, CaseIterable, Identifiable { + case fps15 = "15" + case fps30 = "30" + case fps60 = "60" + + var frameRate: Float64 { + switch self { + case .fps15: + return 15 + case .fps30: + return 30 + case .fps60: + return 60 + } + } + + var id: Self { self } +} + +enum VideoEffectItem: String, CaseIterable, Identifiable, Sendable { + case none + case monochrome + + var id: Self { self } + + func makeVideoEffect() -> VideoEffect? 
{ + switch self { + case .none: + return nil + case .monochrome: + return MonochromeEffect() + } + } +} + +struct PublishView: View { + @Environment(\.horizontalSizeClass) private var horizontalSizeClass + @EnvironmentObject var preference: PreferenceViewModel + @StateObject private var model = PublishViewModel() + + var body: some View { + ZStack { + VStack { + MTHKViewRepresentable(previewSource: model) + } + VStack(alignment: .trailing) { + Picker("FPS", selection: $model.currentFPS) { + ForEach(FPS.allCases) { + Text($0.rawValue).tag($0) + } + } + .onChange(of: model.currentFPS) { tag in + model.setFrameRate(tag.frameRate) + } + .pickerStyle(.segmented) + .frame(width: 150) + .padding() + Spacer() + } + } + .onAppear { + model.startRunning(preference) + } + .onDisappear { + model.stopRunning() + } + .navigationTitle("Publish") + .toolbar { + switch model.readyState { + case .connecting: + ToolbarItem(placement: .primaryAction) { + } + case .open: + ToolbarItem(placement: .primaryAction) { + Button(action: { + model.stopPublishing() + }) { + Image(systemName: "stop.circle") + } + } + case .closed: + ToolbarItem(placement: .primaryAction) { + Button(action: { + model.startPublishing(preference) + }) { + Image(systemName: "record.circle") + } + } + case .closing: + ToolbarItem(placement: .primaryAction) { + } + } + } + .alert(isPresented: $model.isShowError) { + Alert( + title: Text("Error"), + message: Text(model.error?.localizedDescription ?? 
""), + dismissButton: .default(Text("OK")) + ) + } + } +} + +#Preview { + PublishView() +} diff --git a/Vendor/HaishinKit.swift/Examples/macOS/PublishViewModel.swift b/Vendor/HaishinKit.swift/Examples/macOS/PublishViewModel.swift new file mode 100644 index 000000000..18575d681 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/macOS/PublishViewModel.swift @@ -0,0 +1,174 @@ +import AVFoundation +import HaishinKit +import RTCHaishinKit +import SwiftUI + +@MainActor +final class PublishViewModel: ObservableObject { + @Published var currentFPS: FPS = .fps30 + @Published var visualEffectItem: VideoEffectItem = .none + @Published private(set) var error: Error? + @Published var isShowError = false + @Published private(set) var isTorchEnabled = false + @Published private(set) var readyState: SessionReadyState = .closed + private(set) var mixer = MediaMixer(captureSessionMode: .multi) + private var tasks: [Task] = [] + private var session: (any Session)? + private var currentPosition: AVCaptureDevice.Position = .back + @ScreenActor private var currentVideoEffect: VideoEffect? + + func startPublishing(_ preference: PreferenceViewModel) { + Task { + guard let session else { + return + } + do { + try await session.connect { + Task { @MainActor in + self.isShowError = true + } + } + } catch { + self.error = error + self.isShowError = true + logger.error(error) + } + } + } + + func stopPublishing() { + Task { + do { + try await session?.close() + } catch { + logger.error(error) + } + } + } + + func makeSession(_ preference: PreferenceViewModel) async { + // Make session. 
+ do { + session = try await SessionBuilderFactory.shared.make(preference.makeURL()) + .setMode(.publish) + .build() + guard let session else { + return + } + await mixer.addOutput(session.stream) + tasks.append(Task { + for await readyState in await session.readyState { + self.readyState = readyState + } + }) + } catch { + self.error = error + isShowError = true + } + do { + if let session { + try await session.stream.setAudioSettings(preference.makeAudioCodecSettings(session.stream.audioSettings)) + } + } catch { + self.error = error + isShowError = true + } + do { + if let session { + try await session.stream.setVideoSettings(preference.makeVideoCodecSettings(session.stream.videoSettings)) + } + } catch { + self.error = error + isShowError = true + } + } + + func startRunning(_ preference: PreferenceViewModel) { + Task { + // SetUp a mixer. + var videoMixerSettings = await mixer.videoMixerSettings + videoMixerSettings.mode = .offscreen + await mixer.setVideoMixerSettings(videoMixerSettings) + // Attach devices + let back = AVCaptureDevice.default(for: .video) + try? await mixer.attachVideo(back, track: 0) + let audio = AVCaptureDevice.default(for: .audio) + try? await mixer.attachAudio(audio, track: 0) + await mixer.startRunning() + await makeSession(preference) + } + Task { @ScreenActor in + if await preference.isGPURendererEnabled { + await mixer.screen.isGPURendererEnabled = true + } else { + await mixer.screen.isGPURendererEnabled = false + } + let assetScreenObject = AssetScreenObject() + assetScreenObject.size = .init(width: 180, height: 180) + assetScreenObject.layoutMargin = .init(top: 16, left: 16, bottom: 0, right: 0) + try? assetScreenObject.startReading(AVAsset(url: URL(fileURLWithPath: Bundle.main.path(forResource: "SampleVideo_360x240_5mb", ofType: "mp4") ?? ""))) + try? 
await mixer.screen.addChild(assetScreenObject) + await mixer.screen.size = .init(width: 1280, height: 720) + await mixer.screen.backgroundColor = NSColor.black.cgColor + } + } + + func stopRunning() { + Task { + await mixer.stopRunning() + try? await mixer.attachAudio(nil) + try? await mixer.attachVideo(nil) + if let session { + await mixer.removeOutput(session.stream) + } + tasks.forEach { $0.cancel() } + tasks.removeAll() + } + } + + func setVisualEffet(_ videoEffect: VideoEffectItem) { + Task { @ScreenActor in + if let currentVideoEffect { + _ = await mixer.screen.unregisterVideoEffect(currentVideoEffect) + } + if let videoEffect = videoEffect.makeVideoEffect() { + currentVideoEffect = videoEffect + _ = await mixer.screen.registerVideoEffect(videoEffect) + } + } + } + + func setFrameRate(_ fps: Float64) { + Task { + do { + // Sets to input frameRate. + try? await mixer.configuration(video: 0) { video in + do { + try video.setFrameRate(fps) + } catch { + logger.error(error) + } + } + try? await mixer.configuration(video: 1) { video in + do { + try video.setFrameRate(fps) + } catch { + logger.error(error) + } + } + // Sets to output frameRate. 
+ try await mixer.setFrameRate(fps) + } catch { + logger.error(error) + } + } + } +} + +extension PublishViewModel: MTHKViewRepresentable.PreviewSource { + nonisolated func connect(to view: HaishinKit.MTHKView) { + Task { + await mixer.addOutput(view) + } + } +} diff --git a/Vendor/HaishinKit.swift/Examples/macOS/SampleVideo_360x240_5mb.mp4 b/Vendor/HaishinKit.swift/Examples/macOS/SampleVideo_360x240_5mb.mp4 new file mode 100644 index 000000000..f27c0927f Binary files /dev/null and b/Vendor/HaishinKit.swift/Examples/macOS/SampleVideo_360x240_5mb.mp4 differ diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/AccentColor.colorset/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/AccentColor.colorset/Contents.json new file mode 100644 index 000000000..eb8789700 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/AccentColor.colorset/Contents.json @@ -0,0 +1,11 @@ +{ + "colors" : [ + { + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Back.imagestacklayer/Content.imageset/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Back.imagestacklayer/Content.imageset/Contents.json new file mode 100644 index 000000000..2e003356c --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Back.imagestacklayer/Content.imageset/Contents.json @@ -0,0 +1,11 @@ +{ + "images" : [ + { + "idiom" : "tv" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Back.imagestacklayer/Contents.json 
b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Back.imagestacklayer/Contents.json new file mode 100644 index 000000000..73c00596a --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Back.imagestacklayer/Contents.json @@ -0,0 +1,6 @@ +{ + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Contents.json new file mode 100644 index 000000000..de59d885a --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Contents.json @@ -0,0 +1,17 @@ +{ + "info" : { + "author" : "xcode", + "version" : 1 + }, + "layers" : [ + { + "filename" : "Front.imagestacklayer" + }, + { + "filename" : "Middle.imagestacklayer" + }, + { + "filename" : "Back.imagestacklayer" + } + ] +} diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Front.imagestacklayer/Content.imageset/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Front.imagestacklayer/Content.imageset/Contents.json new file mode 100644 index 000000000..2e003356c --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Front.imagestacklayer/Content.imageset/Contents.json @@ -0,0 +1,11 @@ +{ + "images" : [ + { + "idiom" : "tv" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git 
a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Front.imagestacklayer/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Front.imagestacklayer/Contents.json new file mode 100644 index 000000000..73c00596a --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Front.imagestacklayer/Contents.json @@ -0,0 +1,6 @@ +{ + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Middle.imagestacklayer/Content.imageset/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Middle.imagestacklayer/Content.imageset/Contents.json new file mode 100644 index 000000000..2e003356c --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Middle.imagestacklayer/Content.imageset/Contents.json @@ -0,0 +1,11 @@ +{ + "images" : [ + { + "idiom" : "tv" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Middle.imagestacklayer/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Middle.imagestacklayer/Contents.json new file mode 100644 index 000000000..73c00596a --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon - App Store.imagestack/Middle.imagestacklayer/Contents.json @@ -0,0 +1,6 @@ +{ + "info" : { + "author" : "xcode", + 
"version" : 1 + } +} diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Back.imagestacklayer/Content.imageset/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Back.imagestacklayer/Content.imageset/Contents.json new file mode 100644 index 000000000..795cce172 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Back.imagestacklayer/Content.imageset/Contents.json @@ -0,0 +1,16 @@ +{ + "images" : [ + { + "idiom" : "tv", + "scale" : "1x" + }, + { + "idiom" : "tv", + "scale" : "2x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Back.imagestacklayer/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Back.imagestacklayer/Contents.json new file mode 100644 index 000000000..73c00596a --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Back.imagestacklayer/Contents.json @@ -0,0 +1,6 @@ +{ + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Contents.json new file mode 100644 index 000000000..de59d885a --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Contents.json @@ -0,0 +1,17 @@ +{ + "info" : { + "author" : "xcode", + "version" : 1 + }, + "layers" : [ + { + "filename" : "Front.imagestacklayer" + }, + { + 
"filename" : "Middle.imagestacklayer" + }, + { + "filename" : "Back.imagestacklayer" + } + ] +} diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Front.imagestacklayer/Content.imageset/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Front.imagestacklayer/Content.imageset/Contents.json new file mode 100644 index 000000000..795cce172 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Front.imagestacklayer/Content.imageset/Contents.json @@ -0,0 +1,16 @@ +{ + "images" : [ + { + "idiom" : "tv", + "scale" : "1x" + }, + { + "idiom" : "tv", + "scale" : "2x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Front.imagestacklayer/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Front.imagestacklayer/Contents.json new file mode 100644 index 000000000..73c00596a --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Front.imagestacklayer/Contents.json @@ -0,0 +1,6 @@ +{ + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Middle.imagestacklayer/Content.imageset/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Middle.imagestacklayer/Content.imageset/Contents.json new file mode 100644 index 000000000..795cce172 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App 
Icon.imagestack/Middle.imagestacklayer/Content.imageset/Contents.json @@ -0,0 +1,16 @@ +{ + "images" : [ + { + "idiom" : "tv", + "scale" : "1x" + }, + { + "idiom" : "tv", + "scale" : "2x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Middle.imagestacklayer/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Middle.imagestacklayer/Contents.json new file mode 100644 index 000000000..73c00596a --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/App Icon.imagestack/Middle.imagestacklayer/Contents.json @@ -0,0 +1,6 @@ +{ + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/Contents.json new file mode 100644 index 000000000..f47ba43da --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/Contents.json @@ -0,0 +1,32 @@ +{ + "assets" : [ + { + "filename" : "App Icon - App Store.imagestack", + "idiom" : "tv", + "role" : "primary-app-icon", + "size" : "1280x768" + }, + { + "filename" : "App Icon.imagestack", + "idiom" : "tv", + "role" : "primary-app-icon", + "size" : "400x240" + }, + { + "filename" : "Top Shelf Image Wide.imageset", + "idiom" : "tv", + "role" : "top-shelf-image-wide", + "size" : "2320x720" + }, + { + "filename" : "Top Shelf Image.imageset", + "idiom" : "tv", + "role" : "top-shelf-image", + "size" : "1920x720" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/Top Shelf Image 
Wide.imageset/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/Top Shelf Image Wide.imageset/Contents.json new file mode 100644 index 000000000..795cce172 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/Top Shelf Image Wide.imageset/Contents.json @@ -0,0 +1,16 @@ +{ + "images" : [ + { + "idiom" : "tv", + "scale" : "1x" + }, + { + "idiom" : "tv", + "scale" : "2x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/Top Shelf Image.imageset/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/Top Shelf Image.imageset/Contents.json new file mode 100644 index 000000000..795cce172 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/App Icon & Top Shelf Image.brandassets/Top Shelf Image.imageset/Contents.json @@ -0,0 +1,16 @@ +{ + "images" : [ + { + "idiom" : "tv", + "scale" : "1x" + }, + { + "idiom" : "tv", + "scale" : "2x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/Contents.json b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/Contents.json new file mode 100644 index 000000000..73c00596a --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/tvOS/Assets.xcassets/Contents.json @@ -0,0 +1,6 @@ +{ + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/ContentView.swift b/Vendor/HaishinKit.swift/Examples/tvOS/ContentView.swift new file mode 100644 index 000000000..cafb27af0 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/tvOS/ContentView.swift @@ -0,0 +1,23 @@ +import SwiftUI + +struct ContentView: View { + var body: some View { + TabView { + PlaybackView() + .tabItem { + Image(systemName: "play.circle") 
+ Text("Playback") + } + + PreferenceView() + .tabItem { + Image(systemName: "person.circle") + Text("Preference") + } + } + } +} + +#Preview { + ContentView() +} diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/HaishinApp.swift b/Vendor/HaishinKit.swift/Examples/tvOS/HaishinApp.swift new file mode 100644 index 000000000..e30d3a383 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/tvOS/HaishinApp.swift @@ -0,0 +1,34 @@ +import HaishinKit +@preconcurrency import Logboard +import RTCHaishinKit +import RTMPHaishinKit +import SRTHaishinKit +import SwiftUI + +let logger = LBLogger.with("com.haishinkit.HaishinKit.HaishinApp") + +@main +struct HaishinApp: App { + @State private var preference = PreferenceViewModel() + + var body: some Scene { + WindowGroup { + ContentView().environmentObject(preference) + } + } + + init() { + Task { + await SessionBuilderFactory.shared.register(RTMPSessionFactory()) + await SessionBuilderFactory.shared.register(SRTSessionFactory()) + await SessionBuilderFactory.shared.register(HTTPSessionFactory()) + + await RTCLogger.shared.setLevel(.debug) + await SRTLogger.shared.setLevel(.debug) + } + LBLogger(kHaishinKitIdentifier).level = .debug + LBLogger(kRTCHaishinKitIdentifier).level = .debug + LBLogger(kRTMPHaishinKitIdentifier).level = .debug + LBLogger(kSRTHaishinKitIdentifier).level = .debug + } +} diff --git a/Vendor/HaishinKit.swift/Examples/tvOS/PublishView.swift b/Vendor/HaishinKit.swift/Examples/tvOS/PublishView.swift new file mode 100644 index 000000000..bf9424793 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/tvOS/PublishView.swift @@ -0,0 +1,8 @@ +import SwiftUI + +struct PublishView: View { + var body: some View { + ZStack { + } + } +} diff --git a/Vendor/HaishinKit.swift/Examples/visionOS/ContentView.swift b/Vendor/HaishinKit.swift/Examples/visionOS/ContentView.swift new file mode 100644 index 000000000..d341de8e7 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/visionOS/ContentView.swift @@ -0,0 +1,16 @@ +import 
HaishinKit +import RTMPHaishinKit +import SRTHaishinKit +import SwiftUI + +struct ContentView: View { + var body: some View { + VStack { + PlaybackView() + } + } +} + +#Preview(windowStyle: .automatic) { + ContentView() +} diff --git a/Vendor/HaishinKit.swift/Examples/visionOS/HaishinApp.swift b/Vendor/HaishinKit.swift/Examples/visionOS/HaishinApp.swift new file mode 100644 index 000000000..f1e081c35 --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/visionOS/HaishinApp.swift @@ -0,0 +1,23 @@ +import HaishinKit +@preconcurrency import Logboard +import RTMPHaishinKit +import SRTHaishinKit +import SwiftUI + +let logger = LBLogger.with("com.haishinkit.HaishinKit.visionOSApp") + +@main +struct HaishinApp: App { + var body: some Scene { + WindowGroup { + ContentView() + } + } + + init() { + Task { + await SessionBuilderFactory.shared.register(RTMPSessionFactory()) + await SessionBuilderFactory.shared.register(SRTSessionFactory()) + } + } +} diff --git a/Vendor/HaishinKit.swift/Examples/visionOS/Info.plist b/Vendor/HaishinKit.swift/Examples/visionOS/Info.plist new file mode 100644 index 000000000..20f75e2af --- /dev/null +++ b/Vendor/HaishinKit.swift/Examples/visionOS/Info.plist @@ -0,0 +1,15 @@ + + + + + UIApplicationSceneManifest + + UIApplicationPreferredDefaultSceneSessionRole + UIWindowSceneSessionRoleApplication + UIApplicationSupportsMultipleScenes + + UISceneConfigurations + + + + diff --git a/Vendor/HaishinKit.swift/Gemfile b/Vendor/HaishinKit.swift/Gemfile new file mode 100644 index 000000000..9135f1785 --- /dev/null +++ b/Vendor/HaishinKit.swift/Gemfile @@ -0,0 +1,8 @@ +source 'https://rubygems.org' + +gem 'cocoapods' +gem 'fastlane' +gem 'rubocop' +gem 'danger' +gem 'abbrev' +gem 'danger-privacymanifest', git: 'https://github.com/shogo4405/danger-privacymanifest' diff --git a/Vendor/HaishinKit.swift/Gemfile.lock b/Vendor/HaishinKit.swift/Gemfile.lock new file mode 100644 index 000000000..1e7673d94 --- /dev/null +++ b/Vendor/HaishinKit.swift/Gemfile.lock @@ 
-0,0 +1,412 @@ +GIT + remote: https://github.com/shogo4405/danger-privacymanifest + revision: 64757a8dd2d121a7996738d1cbc58fdfc0b297a8 + specs: + danger-privacymanifest (0.0.1) + danger-plugin-api (~> 1.0) + +GEM + remote: https://rubygems.org/ + specs: + CFPropertyList (3.0.8) + abbrev (0.1.2) + activesupport (7.2.3) + base64 + benchmark (>= 0.3) + bigdecimal + concurrent-ruby (~> 1.0, >= 1.3.1) + connection_pool (>= 2.2.5) + drb + i18n (>= 1.6, < 2) + logger (>= 1.4.2) + minitest (>= 5.1) + securerandom (>= 0.3) + tzinfo (~> 2.0, >= 2.0.5) + addressable (2.8.8) + public_suffix (>= 2.0.2, < 8.0) + algoliasearch (1.27.5) + httpclient (~> 2.8, >= 2.8.3) + json (>= 1.5.1) + artifactory (3.0.17) + ast (2.4.3) + atomos (0.1.3) + aws-eventstream (1.4.0) + aws-partitions (1.1209.0) + aws-sdk-core (3.241.4) + aws-eventstream (~> 1, >= 1.3.0) + aws-partitions (~> 1, >= 1.992.0) + aws-sigv4 (~> 1.9) + base64 + bigdecimal + jmespath (~> 1, >= 1.6.1) + logger + aws-sdk-kms (1.121.0) + aws-sdk-core (~> 3, >= 3.241.4) + aws-sigv4 (~> 1.5) + aws-sdk-s3 (1.212.0) + aws-sdk-core (~> 3, >= 3.241.4) + aws-sdk-kms (~> 1) + aws-sigv4 (~> 1.5) + aws-sigv4 (1.12.1) + aws-eventstream (~> 1, >= 1.0.2) + babosa (1.0.4) + base64 (0.2.0) + benchmark (0.5.0) + bigdecimal (4.0.1) + claide (1.1.0) + claide-plugins (0.9.2) + cork + nap + open4 (~> 1.3) + cocoapods (1.16.2) + addressable (~> 2.8) + claide (>= 1.0.2, < 2.0) + cocoapods-core (= 1.16.2) + cocoapods-deintegrate (>= 1.0.3, < 2.0) + cocoapods-downloader (>= 2.1, < 3.0) + cocoapods-plugins (>= 1.0.0, < 2.0) + cocoapods-search (>= 1.0.0, < 2.0) + cocoapods-trunk (>= 1.6.0, < 2.0) + cocoapods-try (>= 1.1.0, < 2.0) + colored2 (~> 3.1) + escape (~> 0.0.4) + fourflusher (>= 2.3.0, < 3.0) + gh_inspector (~> 1.0) + molinillo (~> 0.8.0) + nap (~> 1.0) + ruby-macho (>= 2.3.0, < 3.0) + xcodeproj (>= 1.27.0, < 2.0) + cocoapods-core (1.16.2) + activesupport (>= 5.0, < 8) + addressable (~> 2.8) + algoliasearch (~> 1.0) + concurrent-ruby (~> 1.1) + 
fuzzy_match (~> 2.0.4) + nap (~> 1.0) + netrc (~> 0.11) + public_suffix (~> 4.0) + typhoeus (~> 1.0) + cocoapods-deintegrate (1.0.5) + cocoapods-downloader (2.1) + cocoapods-plugins (1.0.0) + nap + cocoapods-search (1.0.1) + cocoapods-trunk (1.6.0) + nap (>= 0.8, < 2.0) + netrc (~> 0.11) + cocoapods-try (1.2.0) + colored (1.2) + colored2 (3.1.2) + commander (4.6.0) + highline (~> 2.0.0) + concurrent-ruby (1.3.6) + connection_pool (3.0.2) + cork (0.3.0) + colored2 (~> 3.1) + csv (3.3.5) + danger (9.5.3) + base64 (~> 0.2) + claide (~> 1.0) + claide-plugins (>= 0.9.2) + colored2 (>= 3.1, < 5) + cork (~> 0.1) + faraday (>= 0.9.0, < 3.0) + faraday-http-cache (~> 2.0) + git (>= 1.13, < 3.0) + kramdown (>= 2.5.1, < 3.0) + kramdown-parser-gfm (~> 1.0) + octokit (>= 4.0) + pstore (~> 0.1) + terminal-table (>= 1, < 5) + danger-plugin-api (1.0.0) + danger (> 2.0) + declarative (0.0.20) + digest-crc (0.7.0) + rake (>= 12.0.0, < 14.0.0) + domain_name (0.6.20240107) + dotenv (2.8.1) + drb (2.2.3) + emoji_regex (3.2.3) + escape (0.0.4) + ethon (0.15.0) + ffi (>= 1.15.0) + excon (0.112.0) + faraday (1.10.4) + faraday-em_http (~> 1.0) + faraday-em_synchrony (~> 1.0) + faraday-excon (~> 1.1) + faraday-httpclient (~> 1.0) + faraday-multipart (~> 1.0) + faraday-net_http (~> 1.0) + faraday-net_http_persistent (~> 1.0) + faraday-patron (~> 1.0) + faraday-rack (~> 1.0) + faraday-retry (~> 1.0) + ruby2_keywords (>= 0.0.4) + faraday-cookie_jar (0.0.8) + faraday (>= 0.8.0) + http-cookie (>= 1.0.0) + faraday-em_http (1.0.0) + faraday-em_synchrony (1.0.1) + faraday-excon (1.1.0) + faraday-http-cache (2.5.1) + faraday (>= 0.8) + faraday-httpclient (1.0.1) + faraday-multipart (1.2.0) + multipart-post (~> 2.0) + faraday-net_http (1.0.2) + faraday-net_http_persistent (1.2.0) + faraday-patron (1.0.0) + faraday-rack (1.0.0) + faraday-retry (1.0.3) + faraday_middleware (1.2.1) + faraday (~> 1.0) + fastimage (2.4.0) + fastlane (2.231.1) + CFPropertyList (>= 2.3, < 4.0.0) + abbrev (~> 0.1.2) + 
addressable (>= 2.8, < 3.0.0) + artifactory (~> 3.0) + aws-sdk-s3 (~> 1.0) + babosa (>= 1.0.3, < 2.0.0) + base64 (~> 0.2.0) + benchmark (>= 0.1.0) + bundler (>= 1.17.3, < 5.0.0) + colored (~> 1.2) + commander (~> 4.6) + csv (~> 3.3) + dotenv (>= 2.1.1, < 3.0.0) + emoji_regex (>= 0.1, < 4.0) + excon (>= 0.71.0, < 1.0.0) + faraday (~> 1.0) + faraday-cookie_jar (~> 0.0.6) + faraday_middleware (~> 1.0) + fastimage (>= 2.1.0, < 3.0.0) + fastlane-sirp (>= 1.0.0) + gh_inspector (>= 1.1.2, < 2.0.0) + google-apis-androidpublisher_v3 (~> 0.3) + google-apis-playcustomapp_v1 (~> 0.1) + google-cloud-env (>= 1.6.0, < 2.0.0) + google-cloud-storage (~> 1.31) + highline (~> 2.0) + http-cookie (~> 1.0.5) + json (< 3.0.0) + jwt (>= 2.1.0, < 3) + logger (>= 1.6, < 2.0) + mini_magick (>= 4.9.4, < 5.0.0) + multipart-post (>= 2.0.0, < 3.0.0) + mutex_m (~> 0.3.0) + naturally (~> 2.2) + nkf (~> 0.2.0) + optparse (>= 0.1.1, < 1.0.0) + ostruct (>= 0.1.0) + plist (>= 3.1.0, < 4.0.0) + rubyzip (>= 2.0.0, < 3.0.0) + security (= 0.1.5) + simctl (~> 1.6.3) + terminal-notifier (>= 2.0.0, < 3.0.0) + terminal-table (~> 3) + tty-screen (>= 0.6.3, < 1.0.0) + tty-spinner (>= 0.8.0, < 1.0.0) + word_wrap (~> 1.0.0) + xcodeproj (>= 1.13.0, < 2.0.0) + xcpretty (~> 0.4.1) + xcpretty-travis-formatter (>= 0.0.3, < 2.0.0) + fastlane-sirp (1.0.0) + sysrandom (~> 1.0) + ffi (1.17.3) + ffi (1.17.3-aarch64-linux-gnu) + ffi (1.17.3-aarch64-linux-musl) + ffi (1.17.3-arm-linux-gnu) + ffi (1.17.3-arm-linux-musl) + ffi (1.17.3-arm64-darwin) + ffi (1.17.3-x86-linux-gnu) + ffi (1.17.3-x86-linux-musl) + ffi (1.17.3-x86_64-darwin) + ffi (1.17.3-x86_64-linux-gnu) + ffi (1.17.3-x86_64-linux-musl) + fourflusher (2.3.1) + fuzzy_match (2.0.4) + gh_inspector (1.1.3) + git (2.3.3) + activesupport (>= 5.0) + addressable (~> 2.8) + process_executer (~> 1.1) + rchardet (~> 1.8) + google-apis-androidpublisher_v3 (0.54.0) + google-apis-core (>= 0.11.0, < 2.a) + google-apis-core (0.11.3) + addressable (~> 2.5, >= 2.5.1) + googleauth 
(>= 0.16.2, < 2.a) + httpclient (>= 2.8.1, < 3.a) + mini_mime (~> 1.0) + representable (~> 3.0) + retriable (>= 2.0, < 4.a) + rexml + google-apis-iamcredentials_v1 (0.17.0) + google-apis-core (>= 0.11.0, < 2.a) + google-apis-playcustomapp_v1 (0.13.0) + google-apis-core (>= 0.11.0, < 2.a) + google-apis-storage_v1 (0.31.0) + google-apis-core (>= 0.11.0, < 2.a) + google-cloud-core (1.8.0) + google-cloud-env (>= 1.0, < 3.a) + google-cloud-errors (~> 1.0) + google-cloud-env (1.6.0) + faraday (>= 0.17.3, < 3.0) + google-cloud-errors (1.5.0) + google-cloud-storage (1.47.0) + addressable (~> 2.8) + digest-crc (~> 0.4) + google-apis-iamcredentials_v1 (~> 0.1) + google-apis-storage_v1 (~> 0.31.0) + google-cloud-core (~> 1.6) + googleauth (>= 0.16.2, < 2.a) + mini_mime (~> 1.0) + googleauth (1.8.1) + faraday (>= 0.17.3, < 3.a) + jwt (>= 1.4, < 3.0) + multi_json (~> 1.11) + os (>= 0.9, < 2.0) + signet (>= 0.16, < 2.a) + highline (2.0.3) + http-cookie (1.0.8) + domain_name (~> 0.5) + httpclient (2.9.0) + mutex_m + i18n (1.14.8) + concurrent-ruby (~> 1.0) + jmespath (1.6.2) + json (2.18.0) + jwt (2.10.2) + base64 + kramdown (2.5.1) + rexml (>= 3.3.9) + kramdown-parser-gfm (1.1.0) + kramdown (~> 2.0) + language_server-protocol (3.17.0.5) + lint_roller (1.1.0) + logger (1.7.0) + mini_magick (4.13.2) + mini_mime (1.1.5) + minitest (6.0.1) + prism (~> 1.5) + molinillo (0.8.0) + multi_json (1.19.1) + multipart-post (2.4.1) + mutex_m (0.3.0) + nanaimo (0.4.0) + nap (1.1.0) + naturally (2.3.0) + netrc (0.11.0) + nkf (0.2.0) + octokit (10.0.0) + faraday (>= 1, < 3) + sawyer (~> 0.9) + open4 (1.3.4) + optparse (0.8.1) + os (1.1.4) + ostruct (0.6.3) + parallel (1.27.0) + parser (3.3.10.1) + ast (~> 2.4.1) + racc + plist (3.7.2) + prism (1.9.0) + process_executer (1.3.0) + pstore (0.2.0) + public_suffix (4.0.7) + racc (1.8.1) + rainbow (3.1.1) + rake (13.3.1) + rchardet (1.10.0) + regexp_parser (2.11.3) + representable (3.2.0) + declarative (< 0.1.0) + trailblazer-option (>= 0.1.1, < 
0.2.0) + uber (< 0.2.0) + retriable (3.1.2) + rexml (3.4.4) + rouge (3.28.0) + rubocop (1.84.0) + json (~> 2.3) + language_server-protocol (~> 3.17.0.2) + lint_roller (~> 1.1.0) + parallel (~> 1.10) + parser (>= 3.3.0.2) + rainbow (>= 2.2.2, < 4.0) + regexp_parser (>= 2.9.3, < 3.0) + rubocop-ast (>= 1.49.0, < 2.0) + ruby-progressbar (~> 1.7) + unicode-display_width (>= 2.4.0, < 4.0) + rubocop-ast (1.49.0) + parser (>= 3.3.7.2) + prism (~> 1.7) + ruby-macho (2.5.1) + ruby-progressbar (1.13.0) + ruby2_keywords (0.0.5) + rubyzip (2.4.1) + sawyer (0.9.3) + addressable (>= 2.3.5) + faraday (>= 0.17.3, < 3) + securerandom (0.4.1) + security (0.1.5) + signet (0.21.0) + addressable (~> 2.8) + faraday (>= 0.17.5, < 3.a) + jwt (>= 1.5, < 4.0) + multi_json (~> 1.10) + simctl (1.6.10) + CFPropertyList + naturally + sysrandom (1.0.5) + terminal-notifier (2.0.0) + terminal-table (3.0.2) + unicode-display_width (>= 1.1.1, < 3) + trailblazer-option (0.1.2) + tty-cursor (0.7.1) + tty-screen (0.8.2) + tty-spinner (0.9.3) + tty-cursor (~> 0.7) + typhoeus (1.5.0) + ethon (>= 0.9.0, < 0.16.0) + tzinfo (2.0.6) + concurrent-ruby (~> 1.0) + uber (0.1.0) + unicode-display_width (2.6.0) + word_wrap (1.0.0) + xcodeproj (1.27.0) + CFPropertyList (>= 2.3.3, < 4.0) + atomos (~> 0.1.3) + claide (>= 1.0.2, < 2.0) + colored2 (~> 3.1) + nanaimo (~> 0.4.0) + rexml (>= 3.3.6, < 4.0) + xcpretty (0.4.1) + rouge (~> 3.28.0) + xcpretty-travis-formatter (1.0.1) + xcpretty (~> 0.2, >= 0.0.7) + +PLATFORMS + aarch64-linux-gnu + aarch64-linux-musl + arm-linux-gnu + arm-linux-musl + arm64-darwin + ruby + x86-linux-gnu + x86-linux-musl + x86_64-darwin + x86_64-linux-gnu + x86_64-linux-musl + +DEPENDENCIES + abbrev + cocoapods + danger + danger-privacymanifest! 
+ fastlane + rubocop + +BUNDLED WITH + 2.6.7 diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/ADTSHeader.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/ADTSHeader.swift new file mode 100644 index 000000000..14f5965fc --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/ADTSHeader.swift @@ -0,0 +1,91 @@ +import CoreMedia +import Foundation + +package struct ADTSHeader: Equatable { + static let size: Int = 7 + static let sizeWithCrc = 9 + static let sync: UInt8 = 0xFF + + var sync = Self.sync + var id: UInt8 = 0 + var layer: UInt8 = 0 + var protectionAbsent = false + var profile: UInt8 = 0 + var sampleFrequencyIndex: UInt8 = 0 + var channelConfiguration: UInt8 = 0 + var originalOrCopy = false + var home = false + var copyrightIdBit = false + var copyrightIdStart = false + package var aacFrameLength: UInt16 = 0 + var bufferFullness: UInt16 = 0 + var aacFrames: UInt8 = 0 + + package init() { + } + + package init(data: Data) { + self.data = data + } + + package func makeFormatDescription() -> CMFormatDescription? { + guard + let type = AudioSpecificConfig.AudioObjectType(rawValue: profile + 1), + let frequency = AudioSpecificConfig.SamplingFrequency(rawValue: sampleFrequencyIndex), + let channel = AudioSpecificConfig.ChannelConfiguration(rawValue: channelConfiguration) else { + return nil + } + var formatDescription: CMAudioFormatDescription? 
+ var audioStreamBasicDescription = AudioStreamBasicDescription( + mSampleRate: frequency.sampleRate, + mFormatID: kAudioFormatMPEG4AAC, + mFormatFlags: UInt32(type.rawValue), + mBytesPerPacket: 0, + mFramesPerPacket: 1024, + mBytesPerFrame: 0, + mChannelsPerFrame: UInt32(channel.rawValue), + mBitsPerChannel: 0, + mReserved: 0 + ) + guard CMAudioFormatDescriptionCreate( + allocator: kCFAllocatorDefault, + asbd: &audioStreamBasicDescription, + layoutSize: 0, + layout: nil, + magicCookieSize: 0, + magicCookie: nil, + extensions: nil, + formatDescriptionOut: &formatDescription + ) == noErr else { + return nil + } + return formatDescription + } +} + +extension ADTSHeader: DataConvertible { + package var data: Data { + get { + Data() + } + set { + guard ADTSHeader.size <= newValue.count else { + return + } + sync = newValue[0] + id = (newValue[1] & 0b00001111) >> 3 + layer = (newValue[1] >> 2) & 0b00000011 + protectionAbsent = (newValue[1] & 0b00000001) == 1 + profile = newValue[2] >> 6 & 0b11 + sampleFrequencyIndex = (newValue[2] >> 2) & 0b00001111 + channelConfiguration = ((newValue[2] & 0b1) << 2) | newValue[3] >> 6 + originalOrCopy = (newValue[3] & 0b00100000) == 0b00100000 + home = (newValue[3] & 0b00010000) == 0b00010000 + copyrightIdBit = (newValue[3] & 0b00001000) == 0b00001000 + copyrightIdStart = (newValue[3] & 0b00000100) == 0b00000100 + aacFrameLength = UInt16(newValue[3] & 0b00000011) << 11 | UInt16(newValue[4]) << 3 | UInt16(newValue[5] >> 5) + bufferFullness = UInt16(newValue[5]) >> 2 | UInt16(newValue[6] >> 2) + aacFrames = newValue[6] & 0b00000011 + } + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/AudioCodec.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/AudioCodec.swift new file mode 100644 index 000000000..4e261efc9 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/AudioCodec.swift @@ -0,0 +1,216 @@ +import AVFoundation + +/// The AudioCodec translate audio data to another format. 
+/// - seealso: https://developer.apple.com/library/ios/technotes/tn2236/_index.html +final class AudioCodec { + static let defaultFrameCapacity: UInt32 = 1024 + static let defaultInputBuffersCursor = 0 + + var settings: AudioCodecSettings = .default { + didSet { + if settings.invalidateConverter(oldValue) { + inputFormat = nil + } else { + settings.apply(audioConverter, oldValue: oldValue) + } + } + } + + var outputFormat: AVAudioFormat? { + return audioConverter?.outputFormat + } + + @AsyncStreamedFlow + var outputStream: AsyncStream<(AVAudioBuffer, AVAudioTime)> + + /// This instance is running to process(true) or not(false). + private(set) var isRunning = false + private(set) var inputFormat: AVAudioFormat? { + didSet { + guard inputFormat != oldValue else { + return + } + inputBuffers.removeAll() + inputBuffersCursor = Self.defaultInputBuffersCursor + outputBuffers.removeAll() + audioConverter = makeAudioConverter() + for _ in 0.. AVAudioBuffer? { + guard let inputFormat else { + return nil + } + switch inputFormat.formatDescription.mediaSubType { + case .linearPCM: + let buffer = AVAudioPCMBuffer(pcmFormat: inputFormat, frameCapacity: Self.defaultFrameCapacity) + buffer?.frameLength = Self.defaultFrameCapacity + return buffer + default: + return AVAudioCompressedBuffer(format: inputFormat, packetCapacity: 1, maximumPacketSize: 1024) + } + } + + private func makeAudioConverter() -> AVAudioConverter? 
{ + guard + let inputFormat, + let outputFormat = settings.format.makeOutputAudioFormat(inputFormat, sampleRate: settings.sampleRate, channelMap: settings.channelMap) else { + return nil + } + let converter = AVAudioConverter(from: inputFormat, to: outputFormat) + settings.apply(converter, oldValue: nil) + if inputFormat.formatDescription.mediaSubType == .linearPCM { + ringBuffer = AudioRingBuffer(inputFormat) + } + if self.outputFormat?.sampleRate != outputFormat.sampleRate { + audioTime.reset() + } + if logger.isEnabledFor(level: .info) { + logger.info("converter:", converter ?? "nil", ",inputFormat:", inputFormat, ",outputFormat:", outputFormat) + } + return converter + } +} + +extension AudioCodec: Codec { + // MARK: Codec + typealias Buffer = AVAudioBuffer + + var outputBuffer: AVAudioBuffer { + guard let outputFormat = audioConverter?.outputFormat else { + return .init() + } + if outputBuffers.isEmpty { + for _ in 0.. Float64 { + let sampleRate = output == 0 ? input : output + guard let supportedSampleRate else { + return sampleRate + } + return supportedSampleRate.sorted { pow($0 - sampleRate, 2) < pow($1 - sampleRate, 2) }.first ?? sampleRate + } + + func makeFramesPerPacket(_ sampleRate: Double) -> UInt32 { + switch self { + case .aac: + return 1024 + case .opus: + // https://www.rfc-editor.org/rfc/rfc6716#section-2.1.4 + let frameDurationSec = 0.02 + return UInt32(sampleRate * frameDurationSec) + case .pcm: + return 1 + } + } + + func makeAudioBuffer(_ format: AVAudioFormat) -> AVAudioBuffer? { + switch self { + case .aac: + return AVAudioCompressedBuffer(format: format, packetCapacity: 1, maximumPacketSize: 1024 * Int(format.channelCount)) + case .opus: + return AVAudioCompressedBuffer(format: format, packetCapacity: 1, maximumPacketSize: 1024 * Int(format.channelCount)) + case .pcm: + return AVAudioPCMBuffer(pcmFormat: format, frameCapacity: 1024) + } + } + + func makeOutputAudioFormat(_ format: AVAudioFormat, sampleRate: Float64, channelMap: [Int]?) 
-> AVAudioFormat? { + let channelCount: UInt32 + if let channelMap { + channelCount = UInt32(channelMap.count) + } else { + channelCount = format.channelCount + } + let mSampleRate = makeSampleRate(format.sampleRate, output: sampleRate) + let config = AudioSpecificConfig.ChannelConfiguration(channelCount: channelCount) + var streamDescription = AudioStreamBasicDescription( + mSampleRate: mSampleRate, + mFormatID: formatID, + mFormatFlags: formatFlags, + mBytesPerPacket: bytesPerPacket, + mFramesPerPacket: makeFramesPerPacket(mSampleRate), + mBytesPerFrame: bytesPerFrame, + mChannelsPerFrame: min( + config?.channelCount ?? format.channelCount, + AudioCodecSettings.maximumNumberOfChannels + ), + mBitsPerChannel: bitsPerChannel, + mReserved: 0 + ) + return AVAudioFormat( + streamDescription: &streamDescription, + channelLayout: config?.audioChannelLayout + ) + } + } + + /// Specifies the bitRate of audio output. + public var bitRate: Int + + /// Specifies the mixes the channels or not. + public var downmix: Bool + + /// Specifies the map of the output to input channels. + public var channelMap: [Int]? + + /// Specifies the sampleRate of audio output. A value of 0 will be the same as the main track source. + public let sampleRate: Float64 + + /// Specifies the output format. + public var format: AudioCodecSettings.Format = .aac + + /// Creates a new instance. + public init( + bitRate: Int = AudioCodecSettings.defaultBitRate, + downmix: Bool = true, + channelMap: [Int]? = nil, + sampleRate: Float64 = 0, + format: AudioCodecSettings.Format = .aac + ) { + self.bitRate = bitRate + self.downmix = downmix + self.channelMap = channelMap + self.sampleRate = sampleRate + self.format = format + } + + func apply(_ converter: AVAudioConverter?, oldValue: AudioCodecSettings?) 
{ + guard let converter else { + return + } + if bitRate != oldValue?.bitRate { + let minAvailableBitRate = converter.applicableEncodeBitRates?.min(by: { a, b in + return a.intValue < b.intValue + })?.intValue ?? bitRate + let maxAvailableBitRate = converter.applicableEncodeBitRates?.max(by: { a, b in + return a.intValue < b.intValue + })?.intValue ?? bitRate + converter.bitRate = min(maxAvailableBitRate, max(minAvailableBitRate, bitRate)) + } + + if downmix != oldValue?.downmix { + converter.downmix = downmix + } + + if channelMap != oldValue?.channelMap, let newChannelMap = validatedChannelMap(converter) { + converter.channelMap = newChannelMap + } + } + + func invalidateConverter(_ rhs: AudioCodecSettings) -> Bool { + return !(format == rhs.format && channelMap == rhs.channelMap) + } + + private func validatedChannelMap(_ converter: AVAudioConverter) -> [NSNumber]? { + guard let channelMap, channelMap.count == converter.outputFormat.channelCount else { + return nil + } + for inputChannel in channelMap where converter.inputFormat.channelCount <= inputChannel { + return nil + } + return channelMap.map { NSNumber(value: $0) } + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/Codec.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/Codec.swift new file mode 100644 index 000000000..0ac41e115 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/Codec.swift @@ -0,0 +1,9 @@ +import Foundation + +protocol Codec { + associatedtype Buffer + + var outputBuffer: Buffer { get } + + func releaseOutputBuffer(_ buffer: Buffer) +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VTSessionConvertible.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VTSessionConvertible.swift new file mode 100644 index 000000000..9278c6eec --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VTSessionConvertible.swift @@ -0,0 +1,30 @@ +import AVFoundation +import Foundation +import VideoToolbox + +enum VTSessionError: 
Swift.Error { + case failedToCreate(status: OSStatus) + case failedToPrepare(status: OSStatus) + case failedToConvert(status: OSStatus) +} + +protocol VTSessionConvertible { + func setOption(_ option: VTSessionOption) -> OSStatus + func setOptions(_ options: Set) -> OSStatus + func convert(_ sampleBuffer: CMSampleBuffer, continuation: AsyncStream.Continuation?) throws + func invalidate() +} + +extension VTSessionConvertible where Self: VTSession { + func setOption(_ option: VTSessionOption) -> OSStatus { + return VTSessionSetProperty(self, key: option.key.CFString, value: option.value) + } + + func setOptions(_ options: Set) -> OSStatus { + var properties: [AnyHashable: AnyObject] = [:] + for option in options { + properties[option.key.CFString] = option.value + } + return VTSessionSetProperties(self, propertyDictionary: properties as CFDictionary) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VTSessionMode.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VTSessionMode.swift new file mode 100644 index 000000000..b47968f87 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VTSessionMode.swift @@ -0,0 +1,59 @@ +import Foundation +import VideoToolbox + +enum VTSessionMode { + case compression + case decompression + + func makeSession(_ videoCodec: VideoCodec) throws -> any VTSessionConvertible { + switch self { + case .compression: + var session: VTCompressionSession? 
+ var status = VTCompressionSessionCreate( + allocator: kCFAllocatorDefault, + width: Int32(videoCodec.settings.videoSize.width), + height: Int32(videoCodec.settings.videoSize.height), + codecType: videoCodec.settings.format.codecType, + encoderSpecification: videoCodec.settings.makeEncoderSpecification(), + imageBufferAttributes: videoCodec.makeImageBufferAttributes(.compression) as CFDictionary?, + compressedDataAllocator: nil, + outputCallback: nil, + refcon: nil, + compressionSessionOut: &session + ) + guard status == noErr, let session else { + throw VTSessionError.failedToCreate(status: status) + } + status = session.setOptions(videoCodec.settings.makeOptions()) + guard status == noErr else { + throw VTSessionError.failedToPrepare(status: status) + } + status = session.prepareToEncodeFrames() + guard status == noErr else { + throw VTSessionError.failedToPrepare(status: status) + } + if let expectedFrameRate = videoCodec.settings.expectedFrameRate { + status = session.setOption(.init(key: .expectedFrameRate, value: expectedFrameRate as CFNumber)) + } + videoCodec.frameInterval = videoCodec.settings.frameInterval + return session + case .decompression: + guard let formatDescription = videoCodec.inputFormat else { + throw VTSessionError.failedToCreate(status: kVTParameterErr) + } + var session: VTDecompressionSession? 
+ let status = VTDecompressionSessionCreate( + allocator: kCFAllocatorDefault, + formatDescription: formatDescription, + decoderSpecification: nil, + imageBufferAttributes: videoCodec.makeImageBufferAttributes(.decompression) as CFDictionary?, + outputCallback: nil, + decompressionSessionOut: &session + ) + guard let session, status == noErr else { + throw VTSessionError.failedToCreate(status: status) + } + return session + } + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VTSessionOption.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VTSessionOption.swift new file mode 100644 index 000000000..98801244e --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VTSessionOption.swift @@ -0,0 +1,18 @@ +import Foundation + +/// A structure that represents Key-Value-Object for the VideoToolbox option. +struct VTSessionOption { + let key: VTSessionOptionKey + let value: AnyObject +} + +extension VTSessionOption: Hashable { + // MARK: Hashable + static func == (lhs: VTSessionOption, rhs: VTSessionOption) -> Bool { + return lhs.key.CFString == rhs.key.CFString + } + + func hash(into hasher: inout Hasher) { + return hasher.combine(key.CFString) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VTSessionOptionKey.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VTSessionOptionKey.swift new file mode 100644 index 000000000..57575a1c2 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VTSessionOptionKey.swift @@ -0,0 +1,65 @@ +import Foundation +import VideoToolbox + +struct VTSessionOptionKey: Codable, RawRepresentable { + typealias RawValue = String + + static let depth = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_Depth as String) + static let profileLevel = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_ProfileLevel as String) + static let H264EntropyMode = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_H264EntropyMode as String) + static let 
numberOfPendingFrames = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_NumberOfPendingFrames as String) + static let pixelBufferPoolIsShared = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_PixelBufferPoolIsShared as String) + static let videoEncoderPixelBufferAttributes = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_VideoEncoderPixelBufferAttributes as String) + static let aspectRatio16x9 = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_AspectRatio16x9 as String) + static let cleanAperture = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_CleanAperture as String) + static let fieldCount = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_FieldCount as String) + static let fieldDetail = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_FieldDetail as String) + static let pixelAspectRatio = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_PixelAspectRatio as String) + static let progressiveScan = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_ProgressiveScan as String) + static let colorPrimaries = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_ColorPrimaries as String) + static let transferFunction = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_TransferFunction as String) + static let YCbCrMatrix = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_YCbCrMatrix as String) + static let ICCProfile = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_ICCProfile as String) + static let expectedDuration = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_ExpectedDuration as String) + static let expectedFrameRate = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_ExpectedFrameRate as String) + static let sourceFrameCount = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_SourceFrameCount as String) + static let allowFrameReordering = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_AllowFrameReordering as String) + static let allowTemporalCompression = 
VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_AllowTemporalCompression as String) + static let maxKeyFrameInterval = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_MaxKeyFrameInterval as String) + static let maxKeyFrameIntervalDuration = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration as String) + + #if os(macOS) + static let usingHardwareAcceleratedVideoEncoder = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_UsingHardwareAcceleratedVideoEncoder as String) + static let requireHardwareAcceleratedVideoEncoder = VTSessionOptionKey(rawValue: kVTVideoEncoderSpecification_RequireHardwareAcceleratedVideoEncoder as String) + static let enableHardwareAcceleratedVideoEncoder = VTSessionOptionKey(rawValue: kVTVideoEncoderSpecification_EnableHardwareAcceleratedVideoEncoder as String) + #endif + + static let multiPassStorage = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_MultiPassStorage as String) + static let forceKeyFrame = VTSessionOptionKey(rawValue: kVTEncodeFrameOptionKey_ForceKeyFrame as String) + static let pixelTransferProperties = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_PixelTransferProperties as String) + static let averageBitRate = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_AverageBitRate as String) + static let dataRateLimits = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_DataRateLimits as String) + static let moreFramesAfterEnd = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_MoreFramesAfterEnd as String) + static let moreFramesBeforeStart = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_MoreFramesBeforeStart as String) + static let quality = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_Quality as String) + static let realTime = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_RealTime as String) + static let maxH264SliceBytes = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_MaxH264SliceBytes as String) + static let 
maxFrameDelayCount = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_MaxFrameDelayCount as String) + static let encoderID = VTSessionOptionKey(rawValue: kVTVideoEncoderSpecification_EncoderID as String) + + @available(iOS 16.0, tvOS 16.0, macOS 13.0, *) + static let constantBitRate = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_ConstantBitRate as String) + + @available(iOS 26.0, tvOS 26.0, macOS 26.0, *) + static let variableBitRate = VTSessionOptionKey(rawValue: kVTCompressionPropertyKey_VariableBitRate as String) + + let rawValue: String + + var CFString: CFString { + return rawValue as CFString + } + + init(rawValue: String) { + self.rawValue = rawValue + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VideoCodec.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VideoCodec.swift new file mode 100644 index 000000000..8dd08ae2f --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VideoCodec.swift @@ -0,0 +1,174 @@ +import AVFoundation +import CoreFoundation +import VideoToolbox +#if canImport(UIKit) +import UIKit +#endif + +final class VideoCodec { + static let frameInterval: Double = 0.0 + + var settings: VideoCodecSettings = .default { + didSet { + let invalidateSession = settings.invalidateSession(oldValue) + if invalidateSession { + self.invalidateSession = invalidateSession + } else { + settings.apply(self, rhs: oldValue) + } + } + } + var passthrough = true + var outputStream: AsyncStream { + AsyncStream { continuation in + self.continuation = continuation + } + } + var frameInterval = VideoCodec.frameInterval + private var startedAt: CMTime = .zero + private var continuation: AsyncStream.Continuation? + private var invalidateSession = true + private var presentationTimeStamp: CMTime = .zero + private(set) var isRunning = false + private(set) var inputFormat: CMFormatDescription? 
{ + didSet { + guard inputFormat != oldValue else { + return + } + invalidateSession = true + outputFormat = nil + } + } + private(set) var session: (any VTSessionConvertible)? { + didSet { + oldValue?.invalidate() + invalidateSession = false + } + } + private(set) var outputFormat: CMFormatDescription? + + func append(_ sampleBuffer: CMSampleBuffer) { + guard isRunning else { + return + } + do { + inputFormat = sampleBuffer.formatDescription + if invalidateSession { + if sampleBuffer.formatDescription?.isCompressed == true { + session = try VTSessionMode.decompression.makeSession(self) + } else { + session = try VTSessionMode.compression.makeSession(self) + } + } + guard let session, let continuation else { + return + } + if sampleBuffer.formatDescription?.isCompressed == true { + try session.convert(sampleBuffer, continuation: continuation) + } else { + if useFrame(sampleBuffer.presentationTimeStamp) { + try session.convert(sampleBuffer, continuation: continuation) + presentationTimeStamp = sampleBuffer.presentationTimeStamp + } + } + } catch { + logger.warn(error) + } + } + + func makeImageBufferAttributes(_ mode: VTSessionMode) -> [NSString: AnyObject]? { + switch mode { + case .compression: + var attributes: [NSString: AnyObject] = [:] + if let inputFormat { + // Specify the pixel format of the uncompressed video. + attributes[kCVPixelBufferPixelFormatTypeKey] = inputFormat.mediaType.rawValue as CFNumber + } + return attributes.isEmpty ? 
nil : attributes + case .decompression: + return [ + kCVPixelBufferIOSurfacePropertiesKey: NSDictionary(), + kCVPixelBufferMetalCompatibilityKey: kCFBooleanTrue + ] + } + } + + private func useFrame(_ presentationTimeStamp: CMTime) -> Bool { + guard startedAt <= presentationTimeStamp else { + return false + } + guard self.presentationTimeStamp < presentationTimeStamp else { + return false + } + guard Self.frameInterval < frameInterval else { + return true + } + return frameInterval <= presentationTimeStamp.seconds - self.presentationTimeStamp.seconds + } + + #if os(iOS) || os(tvOS) || os(visionOS) + @objc + private func applicationWillEnterForeground(_ notification: Notification) { + invalidateSession = true + } + + @objc + private func didAudioSessionInterruption(_ notification: Notification) { + guard + let userInfo: [AnyHashable: Any] = notification.userInfo, + let value: NSNumber = userInfo[AVAudioSessionInterruptionTypeKey] as? NSNumber, + let type = AVAudioSession.InterruptionType(rawValue: value.uintValue) else { + return + } + switch type { + case .ended: + invalidateSession = true + default: + break + } + } + #endif +} + +extension VideoCodec: Runner { + // MARK: Running + func startRunning() { + guard !isRunning else { + return + } + #if os(iOS) || os(tvOS) || os(visionOS) + NotificationCenter.default.addObserver( + self, + selector: #selector(self.didAudioSessionInterruption), + name: AVAudioSession.interruptionNotification, + object: nil + ) + NotificationCenter.default.addObserver( + self, + selector: #selector(self.applicationWillEnterForeground), + name: UIApplication.willEnterForegroundNotification, + object: nil + ) + #endif + startedAt = passthrough ? 
.zero : CMClockGetTime(CMClockGetHostTimeClock()) + isRunning = true + } + + func stopRunning() { + guard isRunning else { + return + } + isRunning = false + session = nil + invalidateSession = true + inputFormat = nil + outputFormat = nil + presentationTimeStamp = .zero + continuation?.finish() + startedAt = .zero + #if os(iOS) || os(tvOS) || os(visionOS) + NotificationCenter.default.removeObserver(self, name: AVAudioSession.interruptionNotification, object: nil) + NotificationCenter.default.removeObserver(self, name: UIApplication.willEnterForegroundNotification, object: nil) + #endif + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VideoCodecSettings.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VideoCodecSettings.swift new file mode 100644 index 000000000..9e4bbf167 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Codec/VideoCodecSettings.swift @@ -0,0 +1,229 @@ +import Foundation +import VideoToolbox + +/// Constraints on the video codec compression settings. +public struct VideoCodecSettings: Codable, Sendable { + /// The frame interval for 30fps. + public static let frameInterval30 = (1 / 30) - 0.001 + /// The frame interval for 10fps. + public static let frameInterval10 = (1 / 10) - 0.001 + /// The frame interval for 5fps. + public static let frameInterval05 = (1 / 05) - 0.001 + /// The frame interval for 1fps. + public static let frameInterval01 = (1 / 01) - 0.001 + + /// The default value. + public static let `default` = VideoCodecSettings() + + /// A bitRate mode that affects how to encode the video source. + public struct BitRateMode: Sendable, CustomStringConvertible, Codable, Hashable, Equatable { + public static func == (lhs: VideoCodecSettings.BitRateMode, rhs: VideoCodecSettings.BitRateMode) -> Bool { + lhs.key == rhs.key + } + + /// The average bit rate. + public static let average = BitRateMode(key: .averageBitRate) + + /// The constant bit rate.
+ @available(iOS 16.0, tvOS 16.0, macOS 13.0, *) + public static let constant = BitRateMode(key: .constantBitRate) + + /// The variable bit rate. + /// - seealso: [kVTCompressionPropertyKey_VariableBitRate](https://developer.apple.com/documentation/videotoolbox/kvtcompressionpropertykey_variablebitrate) + @available(iOS 26.0, tvOS 26.0, macOS 26.0, *) + public static let variable = BitRateMode(key: .variableBitRate) + + let key: VTSessionOptionKey + + public var description: String { + key.CFString as String + } + + public func hash(into hasher: inout Hasher) { + return hasher.combine(description) + } + } + + /** + * The scaling mode. + * - seealso: https://developer.apple.com/documentation/videotoolbox/kvtpixeltransferpropertykey_scalingmode + * - seealso: https://developer.apple.com/documentation/videotoolbox/vtpixeltransfersession/pixel_transfer_properties/scaling_mode_constants + */ + public enum ScalingMode: String, Codable, Sendable { + /// kVTScalingMode_Normal + case normal = "Normal" + /// kVTScalingMode_Letterbox + case letterbox = "Letterbox" + /// kVTScalingMode_CropSourceToCleanAperture + case cropSourceToCleanAperture = "CropSourceToCleanAperture" + /// kVTScalingMode_Trim + case trim = "Trim" + } + + /// The type of the VideoCodec supports format. + package enum Format: Codable, Sendable, CaseIterable { + case h264 + case hevc + + #if os(macOS) + var encoderID: NSString { + switch self { + case .h264: + #if arch(arm64) + return NSString(string: "com.apple.videotoolbox.videoencoder.ave.avc") + #else + return NSString(string: "com.apple.videotoolbox.videoencoder.h264.gva") + #endif + case .hevc: + return NSString(string: "com.apple.videotoolbox.videoencoder.ave.hevc") + } + } + #endif + + var codecType: UInt32 { + switch self { + case .h264: + return kCMVideoCodecType_H264 + case .hevc: + return kCMVideoCodecType_HEVC + } + } + } + + /// Specifies the video size of encoding video. + public var videoSize: CGSize + /// Specifies the bitrate. 
+ public var bitRate: Int + /// Specifies the H264 profileLevel. + public var profileLevel: String { + didSet { + if profileLevel.contains("HEVC") { + format = .hevc + } else { + format = .h264 + } + } + } + /// Specifies the scalingMode. + public var scalingMode: ScalingMode + /// Specifies the bitRateMode. + public var bitRateMode: BitRateMode + /// Specifies the keyframeInterval. + public var maxKeyFrameIntervalDuration: Int32 + /// Specifies the allowFrameReordering. + public var allowFrameReordering: Bool? // swiftlint:disable:this discouraged_optional_boolean + /// Specifies the dataRateLimits. + public var dataRateLimits: [Double]? + /// Specifies the low-latency operation for an encoder. + public var isLowLatencyRateControlEnabled: Bool + /// Specifies the hardware accelerated encoder is enabled(TRUE), or not(FALSE) for macOS. + public var isHardwareAcceleratedEnabled: Bool + /// Specifies the video frame interval. + public var frameInterval: Double = 0.0 + /// Specifies the expected frame rate for an encoder. It may optimize power consumption. + public var expectedFrameRate: Double? + + package var format: Format = .h264 + + /// Creates a new VideoCodecSettings instance. + public init( + videoSize: CGSize = .init(width: 854, height: 480), + bitRate: Int = 640 * 1000, + profileLevel: String = kVTProfileLevel_H264_Baseline_3_1 as String, + scalingMode: ScalingMode = .trim, + bitRateMode: BitRateMode = .average, + maxKeyFrameIntervalDuration: Int32 = 2, + // swiftlint:disable discouraged_optional_boolean + allowFrameReordering: Bool? = nil, + // swiftlint:enable discouraged_optional_boolean + dataRateLimits: [Double]? = [0.0, 0.0], + isLowLatencyRateControlEnabled: Bool = false, + isHardwareAcceleratedEnabled: Bool = true, + expectedFrameRate: Double?
= nil + ) { + self.videoSize = videoSize + self.bitRate = bitRate + self.profileLevel = profileLevel + self.scalingMode = scalingMode + self.bitRateMode = bitRateMode + self.maxKeyFrameIntervalDuration = maxKeyFrameIntervalDuration + self.allowFrameReordering = allowFrameReordering + self.dataRateLimits = dataRateLimits + self.isLowLatencyRateControlEnabled = isLowLatencyRateControlEnabled + self.isHardwareAcceleratedEnabled = isHardwareAcceleratedEnabled + self.expectedFrameRate = expectedFrameRate + if profileLevel.contains("HEVC") { + self.format = .hevc + } + } + + func invalidateSession(_ rhs: VideoCodecSettings) -> Bool { + return !(videoSize == rhs.videoSize && + maxKeyFrameIntervalDuration == rhs.maxKeyFrameIntervalDuration && + scalingMode == rhs.scalingMode && + allowFrameReordering == rhs.allowFrameReordering && + bitRateMode == rhs.bitRateMode && + profileLevel == rhs.profileLevel && + dataRateLimits == rhs.dataRateLimits && + isLowLatencyRateControlEnabled == rhs.isLowLatencyRateControlEnabled && + isHardwareAcceleratedEnabled == rhs.isHardwareAcceleratedEnabled + ) + } + + func apply(_ codec: VideoCodec, rhs: VideoCodecSettings) { + if bitRate != rhs.bitRate { + logger.info("bitRate change from ", rhs.bitRate, " to ", bitRate) + let option = VTSessionOption(key: bitRateMode.key, value: NSNumber(value: bitRate)) + _ = codec.session?.setOption(option) + } + if frameInterval != rhs.frameInterval { + codec.frameInterval = frameInterval + } + if expectedFrameRate != rhs.expectedFrameRate { + let value = if let expectedFrameRate { expectedFrameRate } else { 0.0 } + let option = VTSessionOption(key: .expectedFrameRate, value: value as CFNumber) + _ = codec.session?.setOption(option) + } + } + + // https://developer.apple.com/documentation/videotoolbox/encoding_video_for_live_streaming + func makeOptions() -> Set { + let isBaseline = profileLevel.contains("Baseline") + var options = Set([ + .init(key: .realTime, value: kCFBooleanTrue), + .init(key: 
.profileLevel, value: profileLevel as NSObject), + .init(key: bitRateMode.key, value: NSNumber(value: bitRate)), + .init(key: .maxKeyFrameIntervalDuration, value: NSNumber(value: maxKeyFrameIntervalDuration)), + .init(key: .allowFrameReordering, value: (allowFrameReordering ?? !isBaseline) as NSObject), + .init(key: .pixelTransferProperties, value: [ + "ScalingMode": scalingMode.rawValue + ] as NSObject) + ]) + if bitRateMode == .average { + if let dataRateLimits, dataRateLimits.count == 2 { + var limits = [Double](repeating: 0.0, count: 2) + limits[0] = dataRateLimits[0] == 0 ? Double(bitRate) / 8 * 1.5 : dataRateLimits[0] + limits[1] = dataRateLimits[1] == 0 ? Double(1.0) : dataRateLimits[1] + options.insert(.init(key: .dataRateLimits, value: limits as NSArray)) + } + } + #if os(macOS) + if isHardwareAcceleratedEnabled { + options.insert(.init(key: .encoderID, value: format.encoderID)) + options.insert(.init(key: .enableHardwareAcceleratedVideoEncoder, value: kCFBooleanTrue)) + options.insert(.init(key: .requireHardwareAcceleratedVideoEncoder, value: kCFBooleanTrue)) + } + #endif + if !isBaseline && profileLevel.contains("H264") { + options.insert(.init(key: .H264EntropyMode, value: kVTH264EntropyMode_CABAC)) + } + return options + } + + func makeEncoderSpecification() -> CFDictionary? 
{ + if isLowLatencyRateControlEnabled { + return [kVTVideoEncoderSpecification_EnableLowLatencyRateControl: true as CFBoolean] as CFDictionary + } + return nil + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/Localization/ja.lproj/index.md b/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/Localization/ja.lproj/index.md new file mode 100644 index 000000000..80f3d2e4a --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/Localization/ja.lproj/index.md @@ -0,0 +1,86 @@ +# ``HaishinKit`` +メインモジュールです。 + +## 🔍 概要 +ライブストリーミングに必要なカメラやマイクのミキシング機能の提供を行います。各モジュールに対して共通の処理を提供します。 + +### モジュール構成 +|モジュール|説明| +|:-|:-| +|HaishinKit|本モジュールです。| +|RTMPHaishinKit|RTMPプロトコルスタックを提供します。| +|SRTHaishinKit|SRTプロトコルスタックを提供します。| +|RTCHaishinKit|WebRTCのWHEP/WHIPプロトコルスタックを提供します。現在α版です。| +|MoQTHaishinKit|MoQTプロトコルスタックを提供します。現在α版です。 + +## 🎨 機能 +以下の機能を提供しています。 +- ライブミキシング + - [映像のミキシング](doc://HaishinKit/videomixing) + - カメラ映像や静止画を一つの配信映像ソースとして扱います。 + - 音声のミキシング + - 異なるマイク音声を合成して一つの配信音声ソースとして扱います。 +- Session + - RTMP/SRT/WHEP/WHIPといったプロトコルを統一的なAPIで扱えます。 + +## 📖 利用方法 +### ライブミキシング +```swift +let mixer = MediaMixer() + +Task { + do { + // Attaches the microphone device. + try await mixer.attachAudio(AVCaptureDevice.default(for: .audio)) + } catch { + print(error) + } + + do { + // Attaches the camera device. + try await mixer.attachVideo(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)) + } catch { + print(error) + } + + // Associates the stream object with the MediaMixer. + await mixer.addOutput(stream) + await mixer.startRunning() +} +``` + +### Session api. 
+RTMPやSRTとのクライアントとしての実装を統一的なAPIで扱えます。リトライ処理などもAPI内部で行います。 + +#### 前準備 +```swift +import HaishinKit +import RTMPHaishinKit +import SRTHaishinKit + +Task { + await SessionBuilderFactory.shared.register(RTMPSessionFactory()) + await SessionBuilderFactory.shared.register(SRTSessionFactory()) +} +``` + +#### Sessionの作成 +```swift +let session = try await SessionBuilderFactory.shared.make(URL(string: "rtmp://hostname/live/live")) + .setMode(.ingest) + .build() +``` +```swift +let session = try await SessionBuilderFactory.shared.make(URL(string: "srt://hostname:448?stream=xxxxx")) + .setMode(.playback) + .build() +``` + +#### 接続 +配信や視聴を行います。 +```swift +try session.connect { + print("on disconnected") +} +``` + diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/faq.md b/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/faq.md new file mode 100644 index 000000000..9e4b4fe29 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/faq.md @@ -0,0 +1,12 @@ +# FAQ +Frequently Asked Questions and Answers from a Technical Perspective. + +## Q. Is it possible to use a UVC camera? +Yes. Starting with iPadOS 17.0, it became available through [the OS API](https://developer.apple.com/documentation/avfoundation/avcapturedevice/devicetype-swift.struct/external). Unfortunately, its operation on iOS has not been confirmed. +```swift +if #available(iOS 17.0, *) { + let camera = AVCaptureDevice.default(.external, for: .video, +position: .unspecified) + try? await mixer.attachVideo(camera, track: 0) +} +``` diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/index.md b/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/index.md new file mode 100644 index 000000000..d1174d38c --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/index.md @@ -0,0 +1,90 @@ +# ``HaishinKit`` +This is the main module. + +## 🔍 Overview +Provides camera and microphone mixing functionality required for live streaming. 
+It also offers common processing across each module. + +### Module Structure +| Module | Description | +|:-|:-| +| HaishinKit | This module. | +| RTMPHaishinKit | Provides the RTMP protocol stack. | +| SRTHaishinKit | Provides the SRT protocol stack. | +| RTCHaishinKit | Provides the WebRTC WHEP/WHIP protocol stack. Currently in alpha. | +| MoQTHaishinKit | Provides the MoQT protocol stack. Currently in alpha. | + +## 🎨 Features +The following features are available: +- Live Mixing + - [Video Mixing](doc://HaishinKit/videomixing) + - Treats camera video and still images as a single stream source. + - Audio Mixing + - Combines different microphone audio sources into a single audio stream source. +- Session + - Provides a unified API for protocols such as RTMP, SRT, WHEP, and WHIP. + +## 📖 Usage +### Live Mixing +```swift +let mixer = MediaMixer() + +Task { + do { + // Attaches the microphone device. + try await mixer.attachAudio(AVCaptureDevice.default(for: .audio)) + } catch { + print(error) + } + + do { + // Attaches the camera device. + try await mixer.attachVideo(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)) + } catch { + print(error) + } + + // Associates the stream object with the MediaMixer. + await mixer.addOutput(stream) + await mixer.startRunning() +} +``` + +### Session API +Provides a unified API for implementing clients with RTMP and SRT. Retry handling is also performed internally by the API. + +#### Preparation +```swift +import HaishinKit +import RTMPHaishinKit +import SRTHaishinKit + +Task { + await SessionBuilderFactory.shared.register(RTMPSessionFactory()) + await SessionBuilderFactory.shared.register(SRTSessionFactory()) +} +``` + +#### Make Session +**RTMP** +Please provide the RTMP connection URL combined with the streamName. 
+```swift +let session = try await SessionBuilderFactory.shared.make(URL(string: "rtmp://hostname/appName/streamName")) + .setMode(.publish) + .build() +``` +**SRT** +```swift +let session = try await SessionBuilderFactory.shared.make(URL(string: "srt://hostname:448?stream=xxxxx")) + .setMode(.playback) + .build() +``` + +#### Connecting +Used for publishing or playback. +```swift +try session.connect { + print("on disconnected") +} +``` + diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/known-issue.md b/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/known-issue.md new file mode 100644 index 000000000..c71a607d5 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/known-issue.md @@ -0,0 +1,17 @@ +# Known issue + +## 🔍 Overview +This section lists known issues that cannot be resolved within HaishinKit. It mainly summarizes problems that occur during development with Xcode. + +### When Debugging with Xcode +The following issues may occur while developing with Xcode connected. + +#### Application Freezes on Launch +When `MediaMixer#startRunning()` is executed while the app is launched from Xcode, the application may freeze. +It has been confirmed that this does not occur when the application is force-quit and then relaunched. +- iOS18, Xcode16: The issue is still ongoing in the latest version. + +#### Freeze When Starting Recording +When `StreamRecorder#startRecording()` is executed while the app is launched from Xcode, the application may freeze. +It has been confirmed that this does not occur when the application is force-quit and then relaunched. +- iOS18, Xcode16: The issue is still ongoing in the latest version.
diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/videomixing.md b/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/videomixing.md new file mode 100644 index 000000000..eba80de09 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Docs.docc/videomixing.md @@ -0,0 +1,57 @@ +# Video mixing +HaishinKit provides APIs for overlaying still images on camera footage and for embedding text. These features are collectively referred to as [ScreenObjects](https://docs.haishinkit.com/swift/latest/documentation/haishinkit/screenobject). +Filtering with CIFilter is also supported, and for use cases such as applying a mosaic effect to camera footage, the use of CIFilter is recommended. + +## Usage +Here is an overview of how to use the typical ScreenObject objects. + +### ImageScreenObject +An example of compositing images. +```swift +let imageScreenObject = ImageScreenObject() +let imageURL = URL(fileURLWithPath: Bundle.main.path(forResource: "game_jikkyou", ofType: "png") ?? "") +if let provider = CGDataProvider(url: imageURL as CFURL) { + imageScreenObject.verticalAlignment = .bottom + imageScreenObject.layoutMargin = .init(top: 0, left: 0, bottom: 16, right: 0) + imageScreenObject.cgImage = CGImage( + pngDataProviderSource: provider, + decode: nil, + shouldInterpolate: false, + intent: .defaultIntent + ) +} else { + print("no image") +} + +try? await mixer.screen.addChild(imageScreenObject) +``` + +### VideoTrackScreenObject +There may be situations where you want to capture the scenery with the rear camera while showing your facial expression with the front camera. + +First, set up the cameras as follows. Make sure to remember the track numbers, as they will be used later. +```swift +Task { + let back = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) + try? await mixer.attachVideo(back, track: 0) + let front = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) + try? 
await mixer.attachVideo(front, track: 1) +} +``` + +Track number 0 is designed to be rendered across the entire screen. In this case, we are specifying where to render track number 1. + +```swift +Task { @ScreenActor in + let videoScreenObject = VideoTrackScreenObject() + videoScreenObject.cornerRadius = 32.0 + videoScreenObject.track = 1 + videoScreenObject.horizontalAlignment = .right + videoScreenObject.layoutMargin = .init(top: 16, left: 0, bottom: 0, right: 16) + videoScreenObject.size = .init(width: 160 * 2, height: 90 * 2) + // You can add a CIFilter-based filter using the registerVideoEffect API. + _ = videoScreenObject.registerVideoEffect(MonochromeEffect()) + + try? await mixer.screen.addChild(videoScreenObject) +} +``` diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVAudioBuffer+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVAudioBuffer+Extension.swift new file mode 100644 index 000000000..aa74d7f1a --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVAudioBuffer+Extension.swift @@ -0,0 +1,7 @@ +import AVFoundation + +#if hasAttribute(retroactive) +extension AVAudioBuffer: @retroactive @unchecked Sendable {} +#else +extension AVAudioBuffer: @unchecked Sendable {} +#endif diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVAudioCompressedBuffer+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVAudioCompressedBuffer+Extension.swift new file mode 100644 index 000000000..923c597fd --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVAudioCompressedBuffer+Extension.swift @@ -0,0 +1,32 @@ +import AVFoundation +import Foundation + +extension AVAudioCompressedBuffer { + @discardableResult + @inline(__always) + final func copy(_ buffer: AVAudioBuffer) -> Bool { + guard let buffer = buffer as? 
AVAudioCompressedBuffer else { + return false + } + if let packetDescriptions = buffer.packetDescriptions { + self.packetDescriptions?.pointee = packetDescriptions.pointee + } + packetCount = buffer.packetCount + byteLength = buffer.byteLength + data.copyMemory(from: buffer.data, byteCount: Int(buffer.byteLength)) + return true + } + + package func encode(to data: inout Data) { + guard let config = AudioSpecificConfig(formatDescription: format.formatDescription) else { + return + } + config.encode(to: &data, length: Int(byteLength)) + data.withUnsafeMutableBytes { + guard let baseAddress = $0.baseAddress else { + return + } + memcpy(baseAddress.advanced(by: AudioSpecificConfig.adtsHeaderSize), self.data, Int(self.byteLength)) + } + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVAudioPCMBuffer+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVAudioPCMBuffer+Extension.swift new file mode 100644 index 000000000..3fc3f3f01 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVAudioPCMBuffer+Extension.swift @@ -0,0 +1,117 @@ +import Accelerate +import AVFoundation + +extension AVAudioPCMBuffer { + final func makeSampleBuffer(_ when: AVAudioTime) -> CMSampleBuffer? { + var status: OSStatus = noErr + var sampleBuffer: CMSampleBuffer? 
+ status = CMAudioSampleBufferCreateWithPacketDescriptions( + allocator: nil, + dataBuffer: nil, + dataReady: false, + makeDataReadyCallback: nil, + refcon: nil, + formatDescription: format.formatDescription, + sampleCount: Int(frameLength), + presentationTimeStamp: when.makeTime(), + packetDescriptions: nil, + sampleBufferOut: &sampleBuffer + ) + guard let sampleBuffer else { + logger.warn("CMAudioSampleBufferCreateWithPacketDescriptions returned error: ", status) + return nil + } + status = CMSampleBufferSetDataBufferFromAudioBufferList( + sampleBuffer, + blockBufferAllocator: kCFAllocatorDefault, + blockBufferMemoryAllocator: kCFAllocatorDefault, + flags: 0, + bufferList: audioBufferList + ) + if status != noErr { + logger.warn("CMSampleBufferSetDataBufferFromAudioBufferList returned error: ", status) + } + return sampleBuffer + } + + @discardableResult + @inlinable + final func copy(_ audioBuffer: AVAudioBuffer) -> Bool { + guard let audioBuffer = audioBuffer as? AVAudioPCMBuffer, frameLength == audioBuffer.frameLength else { + return false + } + let numSamples = Int(frameLength) + if format.isInterleaved { + let channelCount = Int(format.channelCount) + switch format.commonFormat { + case .pcmFormatInt16: + memcpy(int16ChannelData?[0], audioBuffer.int16ChannelData?[0], numSamples * channelCount * 2) + case .pcmFormatInt32: + memcpy(int32ChannelData?[0], audioBuffer.int32ChannelData?[0], numSamples * channelCount * 4) + case .pcmFormatFloat32: + memcpy(floatChannelData?[0], audioBuffer.floatChannelData?[0], numSamples * channelCount * 4) + default: + break + } + } else { + for i in 0.. AVAudioPCMBuffer? 
{ + guard let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: frameCapacity) else { + return nil + } + buffer.frameLength = frameLength + buffer.copy(self) + return buffer + } + + @discardableResult + @inlinable + final func muted(_ isMuted: Bool) -> AVAudioPCMBuffer { + guard isMuted else { + return self + } + let numSamples = Int(frameLength) + if format.isInterleaved { + let channelCount = Int(format.channelCount) + switch format.commonFormat { + case .pcmFormatInt16: + int16ChannelData?[0].update(repeating: 0, count: numSamples * channelCount) + case .pcmFormatInt32: + int32ChannelData?[0].update(repeating: 0, count: numSamples * channelCount) + case .pcmFormatFloat32: + floatChannelData?[0].update(repeating: 0, count: numSamples * channelCount) + default: + break + } + } else { + for i in 0.. CMTime { + return .init(seconds: AVAudioTime.seconds(forHostTime: hostTime), preferredTimescale: 1000000000) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVCaptureDevice+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVCaptureDevice+Extension.swift new file mode 100644 index 000000000..827c020f2 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVCaptureDevice+Extension.swift @@ -0,0 +1,21 @@ +import AVFoundation +import Foundation + +@available(tvOS 17.0, *) +extension AVCaptureDevice { + func videoFormat(width: Int32, height: Int32, frameRate: Float64, isMultiCamSupported: Bool) -> AVCaptureDevice.Format? { + if isMultiCamSupported { + return formats.first { + $0.isMultiCamSupported && $0.isFrameRateSupported(frameRate) && width <= $0.formatDescription.dimensions.width && height <= $0.formatDescription.dimensions.height + } ?? 
formats.last { + $0.isMultiCamSupported && $0.isFrameRateSupported(frameRate) && $0.formatDescription.dimensions.width < width && $0.formatDescription.dimensions.height < height + } + } else { + return formats.first { + $0.isFrameRateSupported(frameRate) && width <= $0.formatDescription.dimensions.width && height <= $0.formatDescription.dimensions.height + } ?? formats.last { + $0.isFrameRateSupported(frameRate) && $0.formatDescription.dimensions.width < width && $0.formatDescription.dimensions.height < height + } + } + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVCaptureDevice.Format+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVCaptureDevice.Format+Extension.swift new file mode 100644 index 000000000..b8c4826a4 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVCaptureDevice.Format+Extension.swift @@ -0,0 +1,42 @@ +import AVFoundation +import Foundation + +#if os(macOS) +extension AVCaptureDevice.Format { + var isMultiCamSupported: Bool { + return true + } +} +#elseif os(visionOS) +extension AVCaptureDevice.Format { + var isMultiCamSupported: Bool { + return false + } +} +#endif + +@available(tvOS 17.0, *) +extension AVCaptureDevice.Format { + func isFrameRateSupported(_ frameRate: Float64) -> Bool { + var durations: [CMTime] = [] + var frameRates: [Float64] = [] + for range in videoSupportedFrameRateRanges { + if range.minFrameRate == range.maxFrameRate { + durations.append(range.minFrameDuration) + frameRates.append(range.maxFrameRate) + continue + } + if range.contains(frameRate: frameRate) { + return true + } + return false + } + let diff = frameRates.map { abs($0 - frameRate) } + if let minElement = diff.min() { + for i in 0.. 
Float64 { + max(minFrameRate, min(maxFrameRate, rate)) + } + + func contains(frameRate: Float64) -> Bool { + (minFrameRate...maxFrameRate) ~= frameRate + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVLayerVideoGravity+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVLayerVideoGravity+Extension.swift new file mode 100644 index 000000000..3ee238d1f --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/AVLayerVideoGravity+Extension.swift @@ -0,0 +1,34 @@ +import AVFoundation +import Foundation + +extension AVLayerVideoGravity { + func scale(_ display: CGSize, image: CGSize) -> CGAffineTransform { + switch self { + case .resize: + return .init(scaleX: display.width / image.width, y: display.width / image.height) + case .resizeAspect: + let scale = min(display.width / image.width, display.height / image.height) + return .init(scaleX: scale, y: scale) + case .resizeAspectFill: + let scale = max(display.width / image.width, display.height / image.height) + return .init(scaleX: scale, y: scale) + default: + return .init(scaleX: 1.0, y: 1.0) + } + } + + func region(_ display: CGRect, image: CGRect) -> CGRect { + switch self { + case .resize: + return image + case .resizeAspect: + return image + case .resizeAspectFill: + let x = abs(display.width - image.width) / 2 + let y = abs(display.height - image.height) / 2 + return .init(origin: .init(x: x, y: y), size: display.size) + default: + return image + } + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CGImage+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CGImage+Extension.swift new file mode 100644 index 000000000..77f9ff2af --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CGImage+Extension.swift @@ -0,0 +1,8 @@ +import CoreGraphics +import Foundation + +extension CGImage { + var size: CGSize { + return .init(width: width, height: height) + } +} diff --git 
a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CMSampleBuffer+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CMSampleBuffer+Extension.swift new file mode 100644 index 000000000..0ee87509e --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CMSampleBuffer+Extension.swift @@ -0,0 +1,26 @@ +import Accelerate +import AVFoundation +import CoreMedia + +package extension CMSampleBuffer { + @inlinable @inline(__always) var isNotSync: Bool { + get { + guard !sampleAttachments.isEmpty else { + return false + } + return sampleAttachments[0][.notSync] != nil + } + set { + guard !sampleAttachments.isEmpty else { + return + } + sampleAttachments[0][.notSync] = newValue ? 1 : nil + } + } +} + +#if hasAttribute(retroactive) +extension CMSampleBuffer: @retroactive @unchecked Sendable {} +#else +extension CMSampleBuffer: @unchecked Sendable {} +#endif diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CMTime+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CMTime+Extension.swift new file mode 100644 index 000000000..40388f327 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CMTime+Extension.swift @@ -0,0 +1,15 @@ +import AVFoundation +import Foundation + +extension CMTime { + func makeAudioTime() -> AVAudioTime { + return .init(sampleTime: value, atRate: Double(timescale)) + } + + func convertTime(from: CMClock?, to: CMClock? 
= CMClockGetHostTimeClock()) -> CMTime { + guard let from, let to else { + return self + } + return from.convertTime(self, to: to) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CMVideoDimention+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CMVideoDimention+Extension.swift new file mode 100644 index 000000000..525ba839a --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CMVideoDimention+Extension.swift @@ -0,0 +1,8 @@ +import AVFoundation +import Foundation + +extension CMVideoDimensions { + var size: CGSize { + return .init(width: CGFloat(width), height: CGFloat(height)) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CMVideoFormatDescription+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CMVideoFormatDescription+Extension.swift new file mode 100644 index 000000000..db55a514c --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CMVideoFormatDescription+Extension.swift @@ -0,0 +1,83 @@ +import CoreImage +import CoreMedia + +extension CMVideoFormatDescription { + package var isCompressed: Bool { + switch CMFormatDescriptionGetMediaSubType(self) { + case kCVPixelFormatType_1Monochrome, + kCVPixelFormatType_2Indexed, + kCVPixelFormatType_8Indexed, + kCVPixelFormatType_1IndexedGray_WhiteIsZero, + kCVPixelFormatType_2IndexedGray_WhiteIsZero, + kCVPixelFormatType_4IndexedGray_WhiteIsZero, + kCVPixelFormatType_8IndexedGray_WhiteIsZero, + kCVPixelFormatType_16BE555, + kCVPixelFormatType_16LE555, + kCVPixelFormatType_16LE5551, + kCVPixelFormatType_16BE565, + kCVPixelFormatType_16LE565, + kCVPixelFormatType_24RGB, + kCVPixelFormatType_24BGR, + kCVPixelFormatType_32ARGB, + kCVPixelFormatType_32BGRA, + kCVPixelFormatType_32ABGR, + kCVPixelFormatType_32RGBA, + kCVPixelFormatType_64ARGB, + kCVPixelFormatType_48RGB, + kCVPixelFormatType_32AlphaGray, + kCVPixelFormatType_16Gray, + kCVPixelFormatType_30RGB, + kCVPixelFormatType_422YpCbCr8, + 
kCVPixelFormatType_4444YpCbCrA8, + kCVPixelFormatType_4444YpCbCrA8R, + kCVPixelFormatType_4444AYpCbCr8, + kCVPixelFormatType_4444AYpCbCr16, + kCVPixelFormatType_444YpCbCr8, + kCVPixelFormatType_422YpCbCr16, + kCVPixelFormatType_422YpCbCr10, + kCVPixelFormatType_444YpCbCr10, + kCVPixelFormatType_420YpCbCr8Planar, + kCVPixelFormatType_420YpCbCr8PlanarFullRange, + kCVPixelFormatType_422YpCbCr_4A_8BiPlanar, + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, + kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, + kCVPixelFormatType_422YpCbCr8_yuvs, + kCVPixelFormatType_422YpCbCr8FullRange, + kCVPixelFormatType_OneComponent8, + kCVPixelFormatType_TwoComponent8, + kCVPixelFormatType_OneComponent16Half, + kCVPixelFormatType_OneComponent32Float, + kCVPixelFormatType_TwoComponent16Half, + kCVPixelFormatType_TwoComponent32Float, + kCVPixelFormatType_64RGBAHalf, + kCVPixelFormatType_128RGBAFloat, + kCVPixelFormatType_Lossy_32BGRA, + kCVPixelFormatType_Lossless_32BGRA, + kCVPixelFormatType_Lossy_420YpCbCr8BiPlanarFullRange, + kCVPixelFormatType_Lossy_420YpCbCr8BiPlanarVideoRange, + kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarFullRange, + kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarVideoRange, + kCVPixelFormatType_Lossy_420YpCbCr10PackedBiPlanarVideoRange, + kCVPixelFormatType_Lossy_422YpCbCr10PackedBiPlanarVideoRange, + kCVPixelFormatType_Lossless_420YpCbCr10PackedBiPlanarVideoRange, + kCVPixelFormatType_Lossless_422YpCbCr10PackedBiPlanarVideoRange: + return false + default: + return true + } + } + + var configurationBox: Data? { + guard let atoms = CMFormatDescriptionGetExtension(self, extensionKey: kCMFormatDescriptionExtension_SampleDescriptionExtensionAtoms) as? NSDictionary else { + return nil + } + switch mediaSubType { + case .h264: + return atoms["avcC"] as? Data + case .hevc: + return atoms["hvcC"] as? 
Data + default: + return nil + } + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CVPixelBuffer+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CVPixelBuffer+Extension.swift new file mode 100644 index 000000000..bf04dfae2 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CVPixelBuffer+Extension.swift @@ -0,0 +1,155 @@ +import Accelerate +import CoreImage +import CoreVideo +import Foundation + +extension CVPixelBuffer { + enum Error: Swift.Error { + case failedToLock(_ status: CVReturn) + case failedToUnlock(_ status: CVReturn) + case unsupportedFormat(_ format: OSType) + } + + static let lockFlags = CVPixelBufferLockFlags(rawValue: .zero) + + @inlinable @inline(__always) + var size: CGSize { + return .init(width: CVPixelBufferGetWidth(self), height: CVPixelBufferGetHeight(self)) + } + + @inlinable @inline(__always) + var dataSize: Int { + CVPixelBufferGetDataSize(self) + } + + @inlinable @inline(__always) + var pixelFormatType: OSType { + CVPixelBufferGetPixelFormatType(self) + } + + @inlinable @inline(__always) + var baseAddress: UnsafeMutableRawPointer? { + CVPixelBufferGetBaseAddress(self) + } + + @inlinable @inline(__always) + var planeCount: Int { + CVPixelBufferGetPlaneCount(self) + } + + @inlinable @inline(__always) + var bytesPerRow: Int { + CVPixelBufferGetBytesPerRow(self) + } + + @inlinable @inline(__always) + var width: Int { + CVPixelBufferGetWidth(self) + } + + @inlinable @inline(__always) + var height: Int { + CVPixelBufferGetHeight(self) + } + + @inlinable @inline(__always) + var formatType: OSType { + CVPixelBufferGetPixelFormatType(self) + } + + @inline(__always) + func copy(_ pixelBuffer: CVPixelBuffer?) 
throws { + // https://stackoverflow.com/questions/53132611/copy-a-cvpixelbuffer-on-any-ios-device + try pixelBuffer?.mutate(.readOnly) { pixelBuffer in + if planeCount == 0 { + let dst = self.baseAddress + let src = pixelBuffer.baseAddress + let bytesPerRowSrc = pixelBuffer.bytesPerRow + let bytesPerRowDst = bytesPerRowSrc + if bytesPerRowSrc == bytesPerRowDst { + memcpy(dst, src, height * bytesPerRowSrc) + } else { + var startOfRowSrc = src + var startOfRowDst = dst + for _ in 0.. CIImage { + try lockBaseAddress(.readOnly) + let result = CIImage(cvPixelBuffer: self) + try unlockBaseAddress(.readOnly) + return result + } + + @inline(__always) + func mutate(_ lockFlags: CVPixelBufferLockFlags, lambda: (CVPixelBuffer) throws -> Void) throws { + let status = CVPixelBufferLockBaseAddress(self, lockFlags) + guard status == kCVReturnSuccess else { + throw Error.failedToLock(status) + } + defer { + CVPixelBufferUnlockBaseAddress(self, lockFlags) + } + try lambda(self) + } + + @inlinable + @inline(__always) + func baseAddressOfPlane(_ index: Int) -> UnsafeMutableRawPointer? 
{ + CVPixelBufferGetBaseAddressOfPlane(self, index) + } + + @inlinable + @inline(__always) + func getHeightOfPlane(_ index: Int) -> Int { + CVPixelBufferGetHeightOfPlane(self, index) + } + + @inlinable + @inline(__always) + func bytesPerRawOfPlane(_ index: Int) -> Int { + CVPixelBufferGetBytesPerRowOfPlane(self, index) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CVPixelBufferPool+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CVPixelBufferPool+Extension.swift new file mode 100644 index 000000000..b81862008 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/CVPixelBufferPool+Extension.swift @@ -0,0 +1,13 @@ +import CoreVideo +import Foundation + +extension CVPixelBufferPool { + @discardableResult + func createPixelBuffer(_ pixelBuffer: UnsafeMutablePointer) -> CVReturn { + return CVPixelBufferPoolCreatePixelBuffer( + kCFAllocatorDefault, + self, + pixelBuffer + ) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/Data+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/Data+Extension.swift new file mode 100644 index 000000000..dd67e5b6c --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/Data+Extension.swift @@ -0,0 +1,46 @@ +import CoreMedia +import Foundation + +package extension Data { + var bytes: [UInt8] { + withUnsafeBytes { + guard let pointer = $0.baseAddress?.assumingMemoryBound(to: UInt8.self) else { + return [] + } + return [UInt8](UnsafeBufferPointer(start: pointer, count: count)) + } + } + + func makeBlockBuffer(advancedBy: Int = 0) -> CMBlockBuffer? { + var blockBuffer: CMBlockBuffer? + let length = count - advancedBy + return withUnsafeBytes { (buffer: UnsafeRawBufferPointer) -> CMBlockBuffer? 
in + guard let baseAddress = buffer.baseAddress else { + return nil + } + guard CMBlockBufferCreateWithMemoryBlock( + allocator: kCFAllocatorDefault, + memoryBlock: nil, + blockLength: length, + blockAllocator: nil, + customBlockSource: nil, + offsetToData: 0, + dataLength: length, + flags: 0, + blockBufferOut: &blockBuffer) == noErr else { + return nil + } + guard let blockBuffer else { + return nil + } + guard CMBlockBufferReplaceDataBytes( + with: baseAddress.advanced(by: advancedBy), + blockBuffer: blockBuffer, + offsetIntoDestination: 0, + dataLength: length) == noErr else { + return nil + } + return blockBuffer + } + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/ExpressibleByIntegerLiteral+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/ExpressibleByIntegerLiteral+Extension.swift new file mode 100644 index 000000000..832146f83 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/ExpressibleByIntegerLiteral+Extension.swift @@ -0,0 +1,24 @@ +import Foundation + +package extension ExpressibleByIntegerLiteral { + var data: Data { + return withUnsafePointer(to: self) { value in + return Data(bytes: UnsafeRawPointer(value), count: MemoryLayout.size) + } + } + + init(data: Data) { + let diff: Int = MemoryLayout.size - data.count + if 0 < diff { + var buffer = Data(repeating: 0, count: diff) + buffer.append(data) + self = buffer.withUnsafeBytes { $0.baseAddress!.assumingMemoryBound(to: Self.self).pointee } + return + } + self = data.withUnsafeBytes { $0.baseAddress!.assumingMemoryBound(to: Self.self).pointee } + } + + init(data: Slice) { + self.init(data: Data(data)) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/Mirror+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/Mirror+Extension.swift new file mode 100644 index 000000000..e33150df9 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/Mirror+Extension.swift @@ -0,0 +1,22 @@ 
+import Foundation + +package extension Mirror { + var debugDescription: String { + var data: [String] = [] + if let superclassMirror = superclassMirror { + for child in superclassMirror.children { + guard let label = child.label else { + continue + } + data.append("\(label): \(child.value)") + } + } + for child in children { + guard let label = child.label else { + continue + } + data.append("\(label): \(child.value)") + } + return "\(subjectType){\(data.joined(separator: ","))}" + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/VTCompressionSession+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/VTCompressionSession+Extension.swift new file mode 100644 index 000000000..9f2c2f256 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/VTCompressionSession+Extension.swift @@ -0,0 +1,38 @@ +import Foundation +import VideoToolbox + +extension VTCompressionSession { + func prepareToEncodeFrames() -> OSStatus { + VTCompressionSessionPrepareToEncodeFrames(self) + } +} + +extension VTCompressionSession: VTSessionConvertible { + @inline(__always) + func convert(_ sampleBuffer: CMSampleBuffer, continuation: AsyncStream.Continuation?) 
throws { + guard let imageBuffer = sampleBuffer.imageBuffer else { + return + } + var flags: VTEncodeInfoFlags = [] + let status = VTCompressionSessionEncodeFrame( + self, + imageBuffer: imageBuffer, + presentationTimeStamp: sampleBuffer.presentationTimeStamp, + duration: sampleBuffer.duration, + frameProperties: nil, + infoFlagsOut: &flags, + outputHandler: { _, _, sampleBuffer in + if let sampleBuffer { + continuation?.yield(sampleBuffer) + } + } + ) + if status != noErr { + throw VTSessionError.failedToConvert(status: status) + } + } + + func invalidate() { + VTCompressionSessionInvalidate(self) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/VTDecompressionSession+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/VTDecompressionSession+Extension.swift new file mode 100644 index 000000000..6f7b6cc50 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/VTDecompressionSession+Extension.swift @@ -0,0 +1,62 @@ +import Foundation +import VideoToolbox + +extension VTDecompressionSession: VTSessionConvertible { + static let defaultDecodeFlags: VTDecodeFrameFlags = [ + ._EnableAsynchronousDecompression, + ._EnableTemporalProcessing + ] + + @inline(__always) + func convert(_ sampleBuffer: CMSampleBuffer, continuation: AsyncStream.Continuation?) throws { + var flagsOut: VTDecodeInfoFlags = [] + var _: VTEncodeInfoFlags = [] + let status = VTDecompressionSessionDecodeFrame( + self, + sampleBuffer: sampleBuffer, + flags: Self.defaultDecodeFlags, + infoFlagsOut: &flagsOut, + outputHandler: { status, _, imageBuffer, presentationTimeStamp, duration in + guard let imageBuffer else { + return + } + var status = noErr + var outputFormat: CMFormatDescription? 
+ status = CMVideoFormatDescriptionCreateForImageBuffer( + allocator: kCFAllocatorDefault, + imageBuffer: imageBuffer, + formatDescriptionOut: &outputFormat + ) + guard let outputFormat, status == noErr else { + return + } + var timingInfo = CMSampleTimingInfo( + duration: duration, + presentationTimeStamp: presentationTimeStamp, + decodeTimeStamp: .invalid + ) + var sampleBuffer: CMSampleBuffer? + status = CMSampleBufferCreateForImageBuffer( + allocator: kCFAllocatorDefault, + imageBuffer: imageBuffer, + dataReady: true, + makeDataReadyCallback: nil, + refcon: nil, + formatDescription: outputFormat, + sampleTiming: &timingInfo, + sampleBufferOut: &sampleBuffer + ) + if let sampleBuffer { + continuation?.yield(sampleBuffer) + } + } + ) + if status != noErr { + throw VTSessionError.failedToConvert(status: status) + } + } + + func invalidate() { + VTDecompressionSessionInvalidate(self) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/vImage_Buffer+Extension.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/vImage_Buffer+Extension.swift new file mode 100644 index 000000000..424884e2e --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Extension/vImage_Buffer+Extension.swift @@ -0,0 +1,36 @@ +import Accelerate +import CoreMedia +import CoreVideo +import Foundation + +extension vImage_Buffer { + init?(height: vImagePixelCount, width: vImagePixelCount, pixelBits: UInt32, flags: vImage_Flags) { + self.init() + guard vImageBuffer_Init( + &self, + height, + width, + pixelBits, + flags) == kvImageNoError else { + return nil + } + } + + @discardableResult + mutating func copy(to cvPixelBuffer: CVPixelBuffer, format: inout vImage_CGImageFormat) -> vImage_Error { + let cvImageFormat = vImageCVImageFormat_CreateWithCVPixelBuffer(cvPixelBuffer).takeRetainedValue() + vImageCVImageFormat_SetColorSpace(cvImageFormat, CGColorSpaceCreateDeviceRGB()) + defer { + if let dictionary = CVBufferCopyAttachments(cvPixelBuffer, .shouldNotPropagate) 
{ + CVBufferSetAttachments(cvPixelBuffer, dictionary, .shouldPropagate) + } + } + return vImageBuffer_CopyToCVPixelBuffer( + &self, + &format, + cvPixelBuffer, + cvImageFormat, + nil, + vImage_Flags(kvImageNoFlags)) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/ISO/AudioSpecificConfig.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/ISO/AudioSpecificConfig.swift new file mode 100644 index 000000000..abf27929b --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/ISO/AudioSpecificConfig.swift @@ -0,0 +1,299 @@ +import AVFoundation + +/** + The Audio Specific Config is the global header for MPEG-4 Audio + - seealso: http://wiki.multimedia.cx/index.php?title=MPEG-4_Audio#Audio_Specific_Config + - seealso: http://wiki.multimedia.cx/?title=Understanding_AAC + */ +package struct AudioSpecificConfig: Equatable { + package static let adtsHeaderSize: Int = 7 + + package enum AudioObjectType: UInt8 { + case unknown = 0 + case aacMain = 1 + case aacLc = 2 + case aacSsr = 3 + case aacLtp = 4 + case aacSbr = 5 + case aacScalable = 6 + case twinqVQ = 7 + case celp = 8 + case hxvc = 9 + + init?(objectID: MPEG4ObjectID?) 
{ + switch objectID { + case .aac_Main?: + self = .aacMain + case .AAC_LC?: + self = .aacLc + case .AAC_SSR?: + self = .aacSsr + case .AAC_LTP?: + self = .aacLtp + case .AAC_SBR?: + self = .aacSbr + case .aac_Scalable?: + self = .aacScalable + case .twinVQ?: + self = .twinqVQ + case .CELP?: + self = .celp + case .HVXC?: + self = .hxvc + case .none: + return nil + @unknown default: + return nil + } + } + } + + enum SamplingFrequency: UInt8 { + case hz96000 = 0 + case hz88200 = 1 + case hz64000 = 2 + case hz48000 = 3 + case hz44100 = 4 + case hz32000 = 5 + case hz24000 = 6 + case hz22050 = 7 + case hz16000 = 8 + case hz12000 = 9 + case hz11025 = 10 + case hz8000 = 11 + case hz7350 = 12 + + var sampleRate: Float64 { + switch self { + case .hz96000: + return 96000 + case .hz88200: + return 88200 + case .hz64000: + return 64000 + case .hz48000: + return 48000 + case .hz44100: + return 44100 + case .hz32000: + return 32000 + case .hz24000: + return 24000 + case .hz22050: + return 22050 + case .hz16000: + return 16000 + case .hz12000: + return 12000 + case .hz11025: + return 11025 + case .hz8000: + return 8000 + case .hz7350: + return 7350 + } + } + + init?(sampleRate: Float64) { + switch Int(sampleRate) { + case 96000: + self = .hz96000 + case 88200: + self = .hz88200 + case 64000: + self = .hz64000 + case 48000: + self = .hz48000 + case 44100: + self = .hz44100 + case 32000: + self = .hz32000 + case 24000: + self = .hz24000 + case 22050: + self = .hz22050 + case 16000: + self = .hz16000 + case 12000: + self = .hz12000 + case 11025: + self = .hz11025 + case 8000: + self = .hz8000 + case 7350: + self = .hz7350 + default: + return nil + } + } + } + + enum ChannelConfiguration: UInt8 { + case definedInAOTSpecificConfig = 0 + case frontCenter = 1 + case frontLeftAndFrontRight = 2 + case frontCenterAndFrontLeftAndFrontRight = 3 + case frontCenterAndFrontLeftAndFrontRightAndBackCenter = 4 + case frontCenterAndFrontLeftAndFrontRightAndBackLeftAndBackRight = 5 + case 
frontCenterAndFrontLeftAndFrontRightAndBackLeftAndBackRightLFE = 6 + case frontCenterAndFrontLeftAndFrontRightAndSideLeftAndSideRightAndBackLeftAndBackRightLFE = 7 + + var channelCount: UInt32 { + switch self { + case .definedInAOTSpecificConfig: + return 0 + case .frontCenter: + return 1 + case .frontLeftAndFrontRight: + return 2 + case .frontCenterAndFrontLeftAndFrontRight: + return 3 + case .frontCenterAndFrontLeftAndFrontRightAndBackCenter: + return 4 + case .frontCenterAndFrontLeftAndFrontRightAndBackLeftAndBackRight: + return 5 + case .frontCenterAndFrontLeftAndFrontRightAndBackLeftAndBackRightLFE: + return 6 + case .frontCenterAndFrontLeftAndFrontRightAndSideLeftAndSideRightAndBackLeftAndBackRightLFE: + return 8 + } + } + + var audioChannelLayoutTag: AudioChannelLayoutTag? { + switch self { + case .definedInAOTSpecificConfig: + return nil + case .frontCenter: + return nil + case .frontLeftAndFrontRight: + return nil + case .frontCenterAndFrontLeftAndFrontRight: + return kAudioChannelLayoutTag_MPEG_3_0_B + case .frontCenterAndFrontLeftAndFrontRightAndBackCenter: + return kAudioChannelLayoutTag_MPEG_4_0_B + case .frontCenterAndFrontLeftAndFrontRightAndBackLeftAndBackRight: + return kAudioChannelLayoutTag_MPEG_5_0_D + case .frontCenterAndFrontLeftAndFrontRightAndBackLeftAndBackRightLFE: + return kAudioChannelLayoutTag_MPEG_5_1_D + case .frontCenterAndFrontLeftAndFrontRightAndSideLeftAndSideRightAndBackLeftAndBackRightLFE: + return kAudioChannelLayoutTag_MPEG_7_1_B + } + } + + var audioChannelLayout: AVAudioChannelLayout? 
{ + guard let audioChannelLayoutTag else { + return nil + } + return AVAudioChannelLayout(layoutTag: audioChannelLayoutTag) + } + + init?(channelCount: UInt32) { + switch channelCount { + case 0: + self = .definedInAOTSpecificConfig + case 1: + self = .frontCenter + case 2: + self = .frontLeftAndFrontRight + case 3: + self = .frontCenterAndFrontLeftAndFrontRight + case 4: + self = .frontCenterAndFrontLeftAndFrontRightAndBackCenter + case 5: + self = .frontCenterAndFrontLeftAndFrontRightAndBackLeftAndBackRight + case 6: + self = .frontCenterAndFrontLeftAndFrontRightAndBackLeftAndBackRightLFE + case 8: + self = .frontCenterAndFrontLeftAndFrontRightAndSideLeftAndSideRightAndBackLeftAndBackRightLFE + default: + return nil + } + } + } + + let type: AudioObjectType + let frequency: SamplingFrequency + let channelConfig: ChannelConfiguration + let frameLengthFlag = false + + package var bytes: [UInt8] { + var bytes = [UInt8](repeating: 0, count: 2) + bytes[0] = type.rawValue << 3 | (frequency.rawValue >> 1) + bytes[1] = (frequency.rawValue & 0x1) << 7 | (channelConfig.rawValue & 0xF) << 3 + return bytes + } + + package init?(bytes: [UInt8]) { + guard + let type = AudioObjectType(rawValue: bytes[0] >> 3), + let frequency = SamplingFrequency(rawValue: (bytes[0] & 0b00000111) << 1 | (bytes[1] >> 7)), + let channel = ChannelConfiguration(rawValue: (bytes[1] & 0b01111000) >> 3) else { + return nil + } + self.type = type + self.frequency = frequency + self.channelConfig = channel + } + + init(type: AudioObjectType, frequency: SamplingFrequency, channel: ChannelConfiguration) { + self.type = type + self.frequency = frequency + self.channelConfig = channel + } + + package init?(formatDescription: CMFormatDescription?) 
{ + guard + let streamDescription = formatDescription?.audioStreamBasicDescription, + let type = AudioObjectType(objectID: MPEG4ObjectID(rawValue: Int(streamDescription.mFormatFlags))), + let frequency = SamplingFrequency(sampleRate: streamDescription.mSampleRate), + let channelConfig = ChannelConfiguration(channelCount: streamDescription.mChannelsPerFrame) else { + return nil + } + self.type = type + self.frequency = frequency + self.channelConfig = channelConfig + } + + func encode(to data: inout Data, length: Int) { + let fullSize: Int = Self.adtsHeaderSize + length + data[0] = 0xFF + data[1] = 0xF9 + data[2] = (type.rawValue - 1) << 6 | (frequency.rawValue << 2) | (channelConfig.rawValue >> 2) + data[3] = (channelConfig.rawValue & 3) << 6 | UInt8(fullSize >> 11) + data[4] = UInt8((fullSize & 0x7FF) >> 3) + data[5] = ((UInt8(fullSize & 7)) << 5) + 0x1F + data[6] = 0xFC + } + + package func makeAudioFormat() -> AVAudioFormat? { + var audioStreamBasicDescription = makeAudioStreamBasicDescription() + if let audioChannelLayoutTag = channelConfig.audioChannelLayoutTag { + return AVAudioFormat( + streamDescription: &audioStreamBasicDescription, + channelLayout: AVAudioChannelLayout(layoutTag: audioChannelLayoutTag) + ) + } + return AVAudioFormat(streamDescription: &audioStreamBasicDescription) + } + + private func makeAudioStreamBasicDescription() -> AudioStreamBasicDescription { + AudioStreamBasicDescription( + mSampleRate: frequency.sampleRate, + mFormatID: kAudioFormatMPEG4AAC, + mFormatFlags: UInt32(type.rawValue), + mBytesPerPacket: 0, + mFramesPerPacket: frameLengthFlag ? 
960 : 1024, + mBytesPerFrame: 0, + mChannelsPerFrame: channelConfig.channelCount, + mBitsPerChannel: 0, + mReserved: 0 + ) + } +} + +extension AudioSpecificConfig: CustomDebugStringConvertible { + // MARK: CustomDebugStringConvertible + package var debugDescription: String { + Mirror(reflecting: self).debugDescription + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/ISO/H264NALUnit.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/ISO/H264NALUnit.swift new file mode 100644 index 000000000..e098110dc --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/ISO/H264NALUnit.swift @@ -0,0 +1,42 @@ +import CoreMedia +import Foundation + +package enum H264NALUnitType: UInt8, Equatable { + case unspec = 0 + case slice = 1 // P frame + case dpa = 2 + case dpb = 3 + case dpc = 4 + case idr = 5 // I frame + case sei = 6 + case sps = 7 + case pps = 8 + case aud = 9 + case eoseq = 10 + case eostream = 11 + case fill = 12 +} + +// MARK: - +package struct H264NALUnit: NALUnit, Equatable { + package let refIdc: UInt8 + package let type: H264NALUnitType + package let payload: Data + + init(_ data: Data, length: Int) { + self.refIdc = data[0] >> 5 + self.type = H264NALUnitType(rawValue: data[0] & 0x1f) ?? .unspec + self.payload = data.subdata(in: 1..> 1) ?? .unspec + self.temporalIdPlusOne = data[1] & 0b00011111 + self.payload = data.subdata(in: 2.., count: UInt32) { + self.init(data: Data(bytes: bytes, count: Int(count))) + } + + package init?(data: Data?) 
{ + guard let data = data else { + return nil + } + self.init(data: data) + } + + package func toByteStream() -> Data { + let buffer = ByteArray(data: data) + var result = Data() + while 0 < buffer.bytesAvailable { + do { + let length: Int = try Int(buffer.readUInt32()) + result.append(contentsOf: [0x00, 0x00, 0x00, 0x01]) + result.append(try buffer.readBytes(length)) + } catch { + logger.error("\(buffer)") + } + } + return result + } + + static package func toNALFileFormat(_ data: inout Data) { + var lastIndexOf = data.count - 1 + for i in (2..(_ data: inout Data, type: T.Type) -> [T] { + var units: [T] = .init() + var lastIndexOf = data.count - 1 + for i in (2.. [Data] { + var offset = 0 + let header = Int(Self.defaultNALUnitHeaderLength) + let length = buffer.dataBuffer?.dataLength ?? 0 + var result: [Data] = [] + + if !buffer.isNotSync { + if let formatDescription = buffer.formatDescription { + result.append(Data([0x09, 0x10])) + formatDescription.parameterSets.forEach { + result.append($0) + } + } + } else { + result.append(Data([0x09, 0x30])) + } + + try? 
buffer.dataBuffer?.withUnsafeMutableBytes { buffer in + guard let baseAddress = buffer.baseAddress else { + return + } + while offset + header < length { + var nalUnitLength: UInt32 = 0 + memcpy(&nalUnitLength, baseAddress + offset, header) + nalUnitLength = CFSwapInt32BigToHost(nalUnitLength) + let start = offset + header + let end = start + Int(nalUnitLength) + if end <= length { + result.append(Data(bytes: baseAddress + start, count: Int(nalUnitLength))) + } else { + break + } + offset = end + } + } + return result + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioCaptureUnit.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioCaptureUnit.swift new file mode 100644 index 000000000..973dec798 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioCaptureUnit.swift @@ -0,0 +1,148 @@ +import AVFoundation + +final class AudioCaptureUnit: CaptureUnit { + let lockQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.AudioCaptureUnit.lock") + var mixerSettings: AudioMixerSettings { + get { + audioMixer.settings + } + set { + audioMixer.settings = newValue + } + } + var isMonitoringEnabled = false { + didSet { + if isMonitoringEnabled { + monitor.startRunning() + } else { + monitor.stopRunning() + } + } + } + var isMultiTrackAudioMixingEnabled = false + var inputFormats: [UInt8: AVAudioFormat] { + return audioMixer.inputFormats + } + var output: AsyncStream<(AVAudioPCMBuffer, AVAudioTime)> { + AsyncStream<(AVAudioPCMBuffer, AVAudioTime)> { continutation in + self.continutation = continutation + } + } + private(set) var isSuspended = false + private lazy var audioMixer: any AudioMixer = { + if isMultiTrackAudioMixingEnabled { + var mixer = AudioMixerByMultiTrack() + mixer.delegate = self + return mixer + } else { + var mixer = AudioMixerBySingleTrack() + mixer.delegate = self + return mixer + } + }() + private var monitor: AudioMonitor = .init() + + #if os(tvOS) + private var _devices: [UInt8: Any] = [:] + 
@available(tvOS 17.0, *) + var devices: [UInt8: AudioDeviceUnit] { + set { + _devices = newValue + } + get { + _devices as! [UInt8: AudioDeviceUnit] + } + } + #else + var devices: [UInt8: AudioDeviceUnit] = [:] + #endif + + private let session: (any CaptureSessionConvertible) + private var continutation: AsyncStream<(AVAudioPCMBuffer, AVAudioTime)>.Continuation? + + init(_ session: (some CaptureSessionConvertible), isMultiTrackAudioMixingEnabled: Bool) { + self.session = session + self.isMultiTrackAudioMixingEnabled = isMultiTrackAudioMixingEnabled + } + + #if os(iOS) || os(macOS) || os(tvOS) + @available(tvOS 17.0, *) + func attachAudio(_ track: UInt8, device: AVCaptureDevice?, configuration: AudioDeviceConfigurationBlock?) throws { + try session.configuration { _ in + session.detachCapture(devices[track]) + devices[track] = nil + if let device { + let capture = try AudioDeviceUnit(track, device: device) + capture.setSampleBufferDelegate(self) + try? configuration?(capture) + session.attachCapture(capture) + devices[track] = capture + } + } + } + + @available(tvOS 17.0, *) + func makeDataOutput(_ track: UInt8) -> AudioDeviceUnitDataOutput { + return .init(track: track, audioMixer: audioMixer) + } + #endif + + func append(_ track: UInt8, buffer: CMSampleBuffer) { + audioMixer.append(track, buffer: buffer) + } + + func append(_ track: UInt8, buffer: AVAudioBuffer, when: AVAudioTime) { + switch buffer { + case let buffer as AVAudioPCMBuffer: + audioMixer.append(track, buffer: buffer, when: when) + default: + break + } + } + + @available(tvOS 17.0, *) + func suspend() { + guard !isSuspended else { + return + } + for capture in devices.values { + session.detachCapture(capture) + } + isSuspended = true + } + + @available(tvOS 17.0, *) + func resume() { + guard isSuspended else { + return + } + for capture in devices.values { + session.attachCapture(capture) + } + isSuspended = false + } + + func finish() { + continutation?.finish() + } +} + +extension AudioCaptureUnit: 
AudioMixerDelegate { + // MARK: AudioMixerDelegate + func audioMixer(_ audioMixer: some AudioMixer, track: UInt8, didInput buffer: AVAudioPCMBuffer, when: AVAudioTime) { + } + + func audioMixer(_ audioMixer: some AudioMixer, errorOccurred error: AudioMixerError) { + } + + func audioMixer(_ audioMixer: some AudioMixer, didOutput audioFormat: AVAudioFormat) { + monitor.inputFormat = audioFormat + } + + func audioMixer(_ audioMixer: some AudioMixer, didOutput audioBuffer: AVAudioPCMBuffer, when: AVAudioTime) { + if let audioBuffer = audioBuffer.clone() { + continutation?.yield((audioBuffer, when)) + } + monitor.append(audioBuffer, when: when) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioDeviceUnit.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioDeviceUnit.swift new file mode 100644 index 000000000..4b1cd6fef --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioDeviceUnit.swift @@ -0,0 +1,74 @@ +import AVFoundation +import Foundation + +#if os(iOS) || os(tvOS) || os(macOS) +/// Configuration calback block for an AudioDeviceUnit +@available(tvOS 17.0, *) +public typealias AudioDeviceConfigurationBlock = @Sendable (AudioDeviceUnit) throws -> Void + +/// An object that provides the interface to control the AVCaptureDevice's transport behavior. +@available(tvOS 17.0, *) +public final class AudioDeviceUnit: DeviceUnit { + /// The output type that this capture audio data output.. + public typealias Output = AVCaptureAudioDataOutput + + /// The track number. + public let track: UInt8 + /// The input data to a cupture session. + public private(set) var input: AVCaptureInput? + /// The current audio device object. + public private(set) var device: AVCaptureDevice? + /// The output data to a sample buffers. + public private(set) var output: Output? { + didSet { + oldValue?.setSampleBufferDelegate(nil, queue: nil) + } + } + /// The connection from a capture input to a capture output. 
+ public private(set) var connection: AVCaptureConnection? + private var dataOutput: AudioDeviceUnitDataOutput? + + init(_ track: UInt8, device: AVCaptureDevice) throws { + input = try AVCaptureDeviceInput(device: device) + self.track = track + self.device = device + output = AVCaptureAudioDataOutput() + if let input, let output { + connection = AVCaptureConnection(inputPorts: input.ports, output: output) + } + } + + func setSampleBufferDelegate(_ audioUnit: AudioCaptureUnit?) { + dataOutput = audioUnit?.makeDataOutput(track) + output?.setSampleBufferDelegate(dataOutput, queue: audioUnit?.lockQueue) + } +} + +@available(tvOS 17.0, *) +final class AudioDeviceUnitDataOutput: NSObject, AVCaptureAudioDataOutputSampleBufferDelegate { + private let track: UInt8 + private let audioMixer: any AudioMixer + + init(track: UInt8, audioMixer: any AudioMixer) { + self.track = track + self.audioMixer = audioMixer + } + + func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { + audioMixer.append(track, buffer: sampleBuffer) + } +} + +#else +final class AudioDeviceUnit: DeviceUnit { + var output: AVCaptureOutput? + var track: UInt8 + var input: AVCaptureInput? + var device: AVCaptureDevice? + var connection: AVCaptureConnection? + + init(_ track: UInt8, device: AVCaptureDevice) throws { + self.track = track + } +} +#endif diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixer.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixer.swift new file mode 100644 index 000000000..e6a217300 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixer.swift @@ -0,0 +1,29 @@ +@preconcurrency import AVFoundation + +enum AudioMixerError: Swift.Error { + /// Mixer is failed to create the AVAudioConverter. + case failedToCreate(from: AVAudioFormat?, to: AVAudioFormat?) + /// Mixer is faild to convert the an audio buffer. 
+ case failedToConvert(error: NSError) + /// Mixer is unable to provide input data. + case unableToProvideInputData + /// Mixer is failed to mix the audio buffers. + case failedToMix(error: any Error) +} + +protocol AudioMixerDelegate: AnyObject { + func audioMixer(_ audioMixer: some AudioMixer, track: UInt8, didInput buffer: AVAudioPCMBuffer, when: AVAudioTime) + func audioMixer(_ audioMixer: some AudioMixer, didOutput audioFormat: AVAudioFormat) + func audioMixer(_ audioMixer: some AudioMixer, didOutput audioBuffer: AVAudioPCMBuffer, when: AVAudioTime) + func audioMixer(_ audioMixer: some AudioMixer, errorOccurred error: AudioMixerError) +} + +protocol AudioMixer: AnyObject { + var delegate: (any AudioMixerDelegate)? { get set } + var settings: AudioMixerSettings { get set } + var inputFormats: [UInt8: AVAudioFormat] { get } + var outputFormat: AVAudioFormat? { get } + + func append(_ track: UInt8, buffer: CMSampleBuffer) + func append(_ track: UInt8, buffer: AVAudioPCMBuffer, when: AVAudioTime) +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerByMultiTrack.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerByMultiTrack.swift new file mode 100644 index 000000000..3d97de3e1 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerByMultiTrack.swift @@ -0,0 +1,211 @@ +import AVFoundation +import CoreAudio +import Foundation + +final class AudioMixerByMultiTrack: AudioMixer { + private static let defaultSampleTime: AVAudioFramePosition = 0 + + weak var delegate: (any AudioMixerDelegate)? + var settings = AudioMixerSettings.default { + didSet { + if let inSourceFormat, settings.invalidateOutputFormat(oldValue) { + outputFormat = settings.makeOutputFormat(inSourceFormat) + } + for (id, trackSettings) in settings.tracks { + tracks[id]?.settings = trackSettings + try? 
mixerNode?.update(volume: trackSettings.volume, bus: id, scope: .input) + } + } + } + var inputFormats: [UInt8: AVAudioFormat] { + return tracks.compactMapValues { $0.inputFormat } + } + private(set) var outputFormat: AVAudioFormat? { + didSet { + guard let outputFormat, outputFormat != oldValue else { + return + } + for id in tracks.keys { + buffers[id] = .init(outputFormat) + tracks[id] = .init(id: id, outputFormat: outputFormat) + tracks[id]?.delegate = self + } + } + } + private var inSourceFormat: CMFormatDescription? { + didSet { + guard inSourceFormat != oldValue else { + return + } + outputFormat = settings.makeOutputFormat(inSourceFormat) + } + } + private var tracks: [UInt8: AudioMixerTrack] = [:] { + didSet { + tryToSetupAudioNodes() + } + } + private var anchor: AVAudioTime? + private var buffers: [UInt8: AudioRingBuffer] = [:] { + didSet { + if logger.isEnabledFor(level: .trace) { + logger.trace(buffers) + } + } + } + private var mixerNode: MixerNode? + private var sampleTime: AVAudioFramePosition = AudioMixerByMultiTrack.defaultSampleTime + private var outputNode: OutputNode? + + private let inputRenderCallback: AURenderCallback = { (inRefCon: UnsafeMutableRawPointer, _: UnsafeMutablePointer, _: UnsafePointer, inBusNumber: UInt32, inNumberFrames: UInt32, ioData: UnsafeMutablePointer?) 
in + let audioMixer = Unmanaged.fromOpaque(inRefCon).takeUnretainedValue() + let status = audioMixer.render(UInt8(inBusNumber), inNumberFrames: inNumberFrames, ioData: ioData) + guard status == noErr else { + audioMixer.delegate?.audioMixer(audioMixer, errorOccurred: .unableToProvideInputData) + return noErr + } + return status + } + + deinit { + if let mixerNode = mixerNode { + AudioOutputUnitStop(mixerNode.audioUnit) + } + if let outputNode = outputNode { + AudioOutputUnitStop(outputNode.audioUnit) + } + } + + func append(_ track: UInt8, buffer: CMSampleBuffer) { + if settings.mainTrack == track { + inSourceFormat = buffer.formatDescription + } + self.track(for: track)?.append(buffer) + } + + func append(_ track: UInt8, buffer: AVAudioPCMBuffer, when: AVAudioTime) { + if settings.mainTrack == track { + inSourceFormat = buffer.format.formatDescription + } + self.track(for: track)?.append(buffer, when: when) + } + + private func tryToSetupAudioNodes() { + do { + try setupAudioNodes() + } catch { + logger.error(error) + delegate?.audioMixer(self, errorOccurred: .failedToMix(error: error)) + } + } + + private func setupAudioNodes() throws { + if let mixerNode { + AudioOutputUnitStop(mixerNode.audioUnit) + } + if let outputNode { + AudioOutputUnitStop(outputNode.audioUnit) + } + mixerNode = nil + outputNode = nil + guard let outputFormat else { + return + } + sampleTime = Self.defaultSampleTime + let mixerNode = try MixerNode(format: outputFormat) + try mixerNode.update(busCount: tracks.count, scope: .input) + let busCount = try mixerNode.busCount(scope: .input) + for index in 0..?) -> OSStatus { + guard let buffer = buffers[track] else { + return noErr + } + if buffer.counts == 0 { + guard let bufferList = UnsafeMutableAudioBufferListPointer(ioData) else { + return noErr + } + for i in 0.. AudioMixerTrack? 
{ + if let track = tracks[id] { + return track + } + guard let outputFormat else { + return nil + } + let track = AudioMixerTrack(id: id, outputFormat: outputFormat) + track.delegate = self + if let trackSettings = settings.tracks[id] { + track.settings = trackSettings + } + tracks[id] = track + buffers[id] = .init(outputFormat) + return track + } +} + +extension AudioMixerByMultiTrack: AudioMixerTrackDelegate { + // MARK: AudioMixerTrackDelegate + func track(_ track: AudioMixerTrack, didOutput audioPCMBuffer: AVAudioPCMBuffer, when: AVAudioTime) { + delegate?.audioMixer(self, track: track.id, didInput: audioPCMBuffer, when: when) + buffers[track.id]?.append(audioPCMBuffer, when: when) + if settings.mainTrack == track.id { + if sampleTime == Self.defaultSampleTime { + sampleTime = when.sampleTime + anchor = when + } + mix(numberOfFrames: audioPCMBuffer.frameLength) + } + } + + func track(_ track: AudioMixerTrack, errorOccurred error: AudioMixerError) { + delegate?.audioMixer(self, errorOccurred: error) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerBySingleTrack.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerBySingleTrack.swift new file mode 100644 index 000000000..df00f7714 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerBySingleTrack.swift @@ -0,0 +1,67 @@ +import AVFoundation +import Foundation + +final class AudioMixerBySingleTrack: AudioMixer { + weak var delegate: (any AudioMixerDelegate)? + var settings = AudioMixerSettings.default { + didSet { + if let trackSettings = settings.tracks[settings.mainTrack] { + track?.settings = trackSettings + } + } + } + var inputFormats: [UInt8: AVAudioFormat] { + var formats: [UInt8: AVAudioFormat] = .init() + if let track = track, let inputFormat = track.inputFormat { + formats[track.id] = inputFormat + } + return formats + } + private(set) var outputFormat: AVAudioFormat? 
{ + didSet { + guard let outputFormat, outputFormat != oldValue else { + return + } + let track = AudioMixerTrack(id: settings.mainTrack, outputFormat: outputFormat) + track.delegate = self + self.track = track + } + } + private var inSourceFormat: CMFormatDescription? { + didSet { + guard inSourceFormat != oldValue else { + return + } + outputFormat = settings.makeOutputFormat(inSourceFormat) + } + } + private var track: AudioMixerTrack? + + func append(_ track: UInt8, buffer: CMSampleBuffer) { + guard settings.mainTrack == track else { + return + } + inSourceFormat = buffer.formatDescription + self.track?.append(buffer) + } + + func append(_ track: UInt8, buffer: AVAudioPCMBuffer, when: AVAudioTime) { + guard settings.mainTrack == track else { + return + } + inSourceFormat = buffer.format.formatDescription + self.track?.append(buffer, when: when) + } +} + +extension AudioMixerBySingleTrack: AudioMixerTrackDelegate { + // MARK: AudioMixerTrackDelegate + func track(_ track: AudioMixerTrack, didOutput buffer: AVAudioPCMBuffer, when: AVAudioTime) { + delegate?.audioMixer(self, track: track.id, didInput: buffer, when: when) + delegate?.audioMixer(self, didOutput: buffer.muted(settings.isMuted), when: when) + } + + func track(_ rtrack: AudioMixerTrack, errorOccurred error: AudioMixerError) { + delegate?.audioMixer(self, errorOccurred: error) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerSettings.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerSettings.swift new file mode 100644 index 000000000..5887ccd82 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerSettings.swift @@ -0,0 +1,78 @@ +import AVFoundation +import Foundation + +/// Constraints on the audio mixier settings. +public struct AudioMixerSettings: Codable, Sendable { + /// The default value. 
+ public static let `default` = AudioMixerSettings() + /// Maximum sampleRate supported by the system + public static let maximumSampleRate: Float64 = 48000.0 + + #if os(macOS) + static let commonFormat: AVAudioCommonFormat = .pcmFormatFloat32 + #else + static let commonFormat: AVAudioCommonFormat = .pcmFormatInt16 + #endif + + /// Specifies the sampleRate of audio output. A value of 0 will be the same as the main track source. + public let sampleRate: Float64 + + /// Specifies the channels of audio output. A value of 0 will be the same as the main track source. + /// - Warning: If you are using IOStreamRecorder, please set it to 1 or 2. Otherwise, the audio will not be saved in local recordings. + public let channels: UInt32 + + /// Specifies the muted that indicates whether the audio output is muted. + public var isMuted: Bool + + /// Specifies the main track number. + public var mainTrack: UInt8 + + /// Specifies the track settings. + public var tracks: [UInt8: AudioMixerTrackSettings] + + /// Specifies the maximum number of channels supported by the system + /// - Description: The maximum number of channels to be used when the number of channels is 0 (not set). More than 2 channels are not supported by the service. It is defined to prevent audio issues since recording does not support more than 2 channels. + public var maximumNumberOfChannels: UInt32 = 2 + + /// Creates a new instance of a settings. + public init( + sampleRate: Float64 = 0, + channels: UInt32 = 0, + isMuted: Bool = false, + mainTrack: UInt8 = 0, + tracks: [UInt8: AudioMixerTrackSettings] = .init() + ) { + self.sampleRate = sampleRate + self.channels = channels + self.isMuted = isMuted + self.mainTrack = mainTrack + self.tracks = tracks + } + + func invalidateOutputFormat(_ oldValue: Self) -> Bool { + return !(sampleRate == oldValue.sampleRate && + channels == oldValue.channels) + } + + func makeOutputFormat(_ formatDescription: CMFormatDescription?) -> AVAudioFormat? 
{ + guard let format = AVAudioUtil.makeAudioFormat(formatDescription) else { + return nil + } + let sampleRate = min(sampleRate == 0 ? format.sampleRate : sampleRate, Self.maximumSampleRate) + let channelCount = channels == 0 ? min(format.channelCount, maximumNumberOfChannels) : channels + if let channelLayout = AVAudioUtil.makeChannelLayout(channelCount) { + return .init( + commonFormat: Self.commonFormat, + sampleRate: sampleRate, + interleaved: format.isInterleaved, + channelLayout: channelLayout + ) + } + return .init( + commonFormat: Self.commonFormat, + sampleRate: sampleRate, + channels: min(channelCount, 2), + interleaved: format.isInterleaved + ) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerTrack.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerTrack.swift new file mode 100644 index 000000000..b2fafa8b8 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerTrack.swift @@ -0,0 +1,127 @@ +import Accelerate +import AVFoundation + +private let kAudioMixerTrack_frameCapacity: AVAudioFrameCount = 1024 + +protocol AudioMixerTrackDelegate: AnyObject { + func track(_ track: AudioMixerTrack, didOutput audioPCMBuffer: AVAudioPCMBuffer, when: AVAudioTime) + func track(_ track: AudioMixerTrack, errorOccurred error: AudioMixerError) +} + +final class AudioMixerTrack { + let id: UInt8 + let outputFormat: AVAudioFormat + weak var delegate: T? + var settings: AudioMixerTrackSettings = .init() { + didSet { + settings.apply(audioConverter, oldValue: oldValue) + } + } + var inputFormat: AVAudioFormat? { + return audioConverter?.inputFormat + } + private var inSourceFormat: CMFormatDescription? { + didSet { + guard inSourceFormat != oldValue else { + return + } + setUp(inSourceFormat) + } + } + private var audioTime = AudioTime() + private var ringBuffer: AudioRingBuffer? + private var inputBuffer: AVAudioPCMBuffer? + private var outputBuffer: AVAudioPCMBuffer? 
+ private var audioConverter: AVAudioConverter? { + didSet { + guard let audioConverter else { + return + } + audioConverter.downmix = settings.downmix + if let channelMap = settings.validatedChannelMap(audioConverter) { + audioConverter.channelMap = channelMap.map { NSNumber(value: $0) } + } else { + switch audioConverter.outputFormat.channelCount { + case 1: + audioConverter.channelMap = [0] + case 2: + audioConverter.channelMap = (audioConverter.inputFormat.channelCount == 1) ? [0, 0] : [0, 1] + default: + break + } + } + audioConverter.primeMethod = .normal + } + } + + init(id: UInt8, outputFormat: AVAudioFormat) { + self.id = id + self.outputFormat = outputFormat + } + + func append(_ sampleBuffer: CMSampleBuffer) { + inSourceFormat = sampleBuffer.formatDescription + if !audioTime.hasAnchor { + audioTime.anchor(sampleBuffer.presentationTimeStamp, sampleRate: outputFormat.sampleRate) + } + ringBuffer?.append(sampleBuffer) + resample() + } + + func append(_ audioBuffer: AVAudioPCMBuffer, when: AVAudioTime) { + inSourceFormat = audioBuffer.format.formatDescription + if !audioTime.hasAnchor { + audioTime.anchor(when) + } + ringBuffer?.append(audioBuffer, when: when) + resample() + } + + @inline(__always) + private func resample() { + guard let outputBuffer, let inputBuffer, let ringBuffer else { + return + } + var status: AVAudioConverterOutputStatus? = .endOfStream + repeat { + var error: NSError? 
+ status = audioConverter?.convert(to: outputBuffer, error: &error) { inNumberFrames, status in + if inNumberFrames <= ringBuffer.counts { + _ = ringBuffer.render(inNumberFrames, ioData: inputBuffer.mutableAudioBufferList) + inputBuffer.frameLength = inNumberFrames + status.pointee = .haveData + return inputBuffer + } else { + status.pointee = .noDataNow + return nil + } + } + switch status { + case .haveData: + delegate?.track(self, didOutput: outputBuffer.muted(settings.isMuted), when: audioTime.at) + audioTime.advanced(1024) + case .error: + if let error { + delegate?.track(self, errorOccurred: .failedToConvert(error: error)) + } + default: + break + } + } while(status == .haveData) + } + + private func setUp(_ inSourceFormat: CMFormatDescription?) { + guard let inputFormat = AVAudioUtil.makeAudioFormat(inSourceFormat) else { + delegate?.track(self, errorOccurred: .failedToCreate(from: inputFormat, to: outputFormat)) + return + } + ringBuffer = .init(inputFormat) + inputBuffer = .init(pcmFormat: inputFormat, frameCapacity: kAudioMixerTrack_frameCapacity * 4) + outputBuffer = .init(pcmFormat: outputFormat, frameCapacity: kAudioMixerTrack_frameCapacity) + if logger.isEnabledFor(level: .info) { + logger.info("inputFormat:", inputFormat, ", outputFormat:", outputFormat) + } + audioTime.reset() + audioConverter = .init(from: inputFormat, to: outputFormat) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerTrackSettings.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerTrackSettings.swift new file mode 100644 index 000000000..05cf47713 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMixerTrackSettings.swift @@ -0,0 +1,56 @@ +import AVFoundation + +/// Constraints on the audio mixier track's settings. +public struct AudioMixerTrackSettings: Codable, Sendable { + /// The default value. + public static let `default` = AudioMixerTrackSettings() + + /// Specifies the volume for output. 
+ public var volume: Float + + /// Specifies the muted that indicates whether the audio output is muted. + public var isMuted = false + + /// Specifies the mixes the channels or not. Currently, it supports input sources with 4, 5, 6, and 8 channels. + public var downmix = true + + /// Specifies the map of the output to input channels. + /// ## Example code: + /// ```swift + /// // If you want to use the 3rd and 4th channels from a 4-channel input source for a 2-channel output, you would specify it like this. + /// channelMap = [2, 3] + /// ``` + public var channelMap: [Int]? + + /// Creates a new instance. + public init(volume: Float = 1.0, isMuted: Bool = false, downmix: Bool = true, channelMap: [Int]? = nil) { + self.volume = volume + self.isMuted = isMuted + self.downmix = downmix + self.channelMap = channelMap + } + + func apply(_ converter: AVAudioConverter?, oldValue: AudioMixerTrackSettings) { + guard let converter else { + return + } + if downmix != oldValue.downmix { + converter.downmix = downmix + } + if channelMap != oldValue.channelMap { + if let channelMap = validatedChannelMap(converter) { + converter.channelMap = channelMap.map { NSNumber(value: $0) } + } + } + } + + func validatedChannelMap(_ converter: AVAudioConverter) -> [Int]? { + guard let channelMap, channelMap.count == converter.outputFormat.channelCount else { + return nil + } + for inputChannel in channelMap where converter.inputFormat.channelCount <= inputChannel { + return nil + } + return channelMap + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMonitor.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMonitor.swift new file mode 100644 index 000000000..ff24853a9 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioMonitor.swift @@ -0,0 +1,116 @@ +import AudioUnit +import AVFoundation +import CoreAudio +import CoreMedia +import Foundation + +final class AudioMonitor { + var inputFormat: AVAudioFormat? 
{ + didSet { + if let inputFormat { + ringBuffer = .init(inputFormat) + if isRunning { + audioUnit = makeAudioUnit() + } + } else { + ringBuffer = nil + } + } + } + private(set) var isRunning = false + private var audioUnit: AudioUnit? { + didSet { + if let oldValue { + AudioOutputUnitStop(oldValue) + AudioUnitUninitialize(oldValue) + AudioComponentInstanceDispose(oldValue) + } + if let audioUnit { + AudioOutputUnitStart(audioUnit) + } + } + } + private var ringBuffer: AudioRingBuffer? + + private let callback: AURenderCallback = { (inRefCon: UnsafeMutableRawPointer, _: UnsafeMutablePointer, _: UnsafePointer, _: UInt32, inNumberFrames: UInt32, ioData: UnsafeMutablePointer?) in + let monitor = Unmanaged.fromOpaque(inRefCon).takeUnretainedValue() + return monitor.render(inNumberFrames, ioData: ioData) + } + + deinit { + stopRunning() + } + + func append(_ audioPCMBuffer: AVAudioPCMBuffer, when: AVAudioTime) { + guard isRunning else { + return + } + ringBuffer?.append(audioPCMBuffer, when: when) + } + + private func render(_ inNumberFrames: UInt32, ioData: UnsafeMutablePointer?) -> OSStatus { + guard let ringBuffer else { + return noErr + } + if ringBuffer.counts == 0 { + guard let bufferList = UnsafeMutableAudioBufferListPointer(ioData) else { + return noErr + } + for i in 0.. AudioUnit? { + guard let inputFormat else { + return nil + } + var inSourceFormat = inputFormat.formatDescription.audioStreamBasicDescription + var audioUnit: AudioUnit? 
+ #if os(macOS) + let subType = kAudioUnitSubType_DefaultOutput + #else + let subType = kAudioUnitSubType_RemoteIO + #endif + var audioComponentDescription = AudioComponentDescription( + componentType: kAudioUnitType_Output, + componentSubType: subType, + componentManufacturer: kAudioUnitManufacturer_Apple, + componentFlags: 0, + componentFlagsMask: 0) + let audioComponent = AudioComponentFindNext(nil, &audioComponentDescription) + if let audioComponent { + AudioComponentInstanceNew(audioComponent, &audioUnit) + } + if let audioUnit { + AudioUnitInitialize(audioUnit) + let ref = UnsafeMutableRawPointer(Unmanaged.passUnretained(self).toOpaque()) + var callbackstruct = AURenderCallbackStruct(inputProc: callback, inputProcRefCon: ref) + AudioUnitSetProperty(audioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &callbackstruct, UInt32(MemoryLayout.size(ofValue: callbackstruct))) + AudioUnitSetProperty(audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &inSourceFormat, UInt32(MemoryLayout.size(ofValue: inSourceFormat))) + } + return audioUnit + } +} + +extension AudioMonitor: Runner { + // MARK: Running + func startRunning() { + guard !isRunning else { + return + } + audioUnit = makeAudioUnit() + isRunning = true + } + + func stopRunning() { + guard isRunning else { + return + } + audioUnit = nil + isRunning = false + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioNode.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioNode.swift new file mode 100644 index 000000000..5c911f524 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioNode.swift @@ -0,0 +1,271 @@ +import AVFoundation + +class AudioNode { + enum Error: Swift.Error { + case unableToFindAudioComponent + case unableToCreateAudioUnit(_ status: OSStatus) + case unableToInitializeAudioUnit(_ status: OSStatus) + case unableToUpdateBus(_ status: OSStatus) + case unableToRetrieveValue(_ status: OSStatus) + case 
unableToConnectToNode(_ status: OSStatus) + } + + enum BusScope: String, CaseIterable { + case input + case output + + var audioUnitScope: AudioUnitScope { + switch self { + case .input: + return kAudioUnitScope_Input + case .output: + return kAudioUnitScope_Output + } + } + } + + let audioUnit: AudioUnit + + init(description: inout AudioComponentDescription) throws { + guard let audioComponent = AudioComponentFindNext(nil, &description) else { + throw Error.unableToFindAudioComponent + } + var audioUnit: AudioUnit? + let status = AudioComponentInstanceNew(audioComponent, &audioUnit) + guard status == noErr, let audioUnit else { + throw Error.unableToCreateAudioUnit(status) + } + self.audioUnit = audioUnit + } + + deinit { + AudioOutputUnitStop(audioUnit) + AudioUnitUninitialize(audioUnit) + AudioComponentInstanceDispose(audioUnit) + } + + func initializeAudioUnit() throws { + let status = AudioUnitInitialize(audioUnit) + guard status == noErr else { + throw Error.unableToInitializeAudioUnit(status) + } + } + + @discardableResult + func connect(to node: AudioNode, sourceBus: Int = 0, destBus: Int = 0) throws -> AudioUnitConnection { + var connection = AudioUnitConnection(sourceAudioUnit: audioUnit, + sourceOutputNumber: UInt32(sourceBus), + destInputNumber: UInt32(destBus)) + let status = AudioUnitSetProperty(node.audioUnit, + kAudioUnitProperty_MakeConnection, + kAudioUnitScope_Input, + 0, + &connection, + UInt32(MemoryLayout.size)) + guard status == noErr else { + throw Error.unableToConnectToNode(status) + } + return connection + } + + func update(format: AVAudioFormat, bus: UInt8, scope: BusScope) throws { + var asbd = format.streamDescription.pointee + let status = AudioUnitSetProperty(audioUnit, + kAudioUnitProperty_StreamFormat, + scope.audioUnitScope, + UInt32(bus), + &asbd, + UInt32(MemoryLayout.size)) + guard status == noErr else { + throw Error.unableToUpdateBus(status) + } + } + + func format(bus: UInt8, scope: BusScope) throws -> 
AudioStreamBasicDescription { + var asbd = AudioStreamBasicDescription() + var propertySize = UInt32(MemoryLayout.size) + let status = AudioUnitGetProperty(audioUnit, + kAudioUnitProperty_StreamFormat, + scope.audioUnitScope, + UInt32(bus), + &asbd, + &propertySize) + guard status == noErr else { + throw Error.unableToRetrieveValue(status) + } + return asbd + } + + /// Apple bug: Cannot set to less than 8 buses + func update(busCount: Int, scope: BusScope) throws { + var busCount = UInt32(busCount) + let status = AudioUnitSetProperty(audioUnit, + kAudioUnitProperty_ElementCount, + scope.audioUnitScope, + 0, + &busCount, + UInt32(MemoryLayout.size)) + guard status == noErr else { + throw Error.unableToUpdateBus(status) + } + } + + func busCount(scope: BusScope) throws -> Int { + var busCount: UInt32 = 0 + var propertySize = UInt32(MemoryLayout.size) + let status = AudioUnitGetProperty(audioUnit, + kAudioUnitProperty_ElementCount, + scope.audioUnitScope, + 0, + &busCount, + &propertySize) + guard status == noErr else { + throw Error.unableToUpdateBus(status) + } + return Int(busCount) + } +} + +final class MixerNode: AudioNode { + private var mixerComponentDescription = AudioComponentDescription( + componentType: kAudioUnitType_Mixer, + componentSubType: kAudioUnitSubType_MultiChannelMixer, + componentManufacturer: kAudioUnitManufacturer_Apple, + componentFlags: 0, + componentFlagsMask: 0) + + init(format: AVAudioFormat) throws { + var mixerDefaultDesc = AudioComponentDescription( + componentType: kAudioUnitType_Mixer, + componentSubType: kAudioUnitSubType_MultiChannelMixer, + componentManufacturer: kAudioUnitManufacturer_Apple, + componentFlags: 0, + componentFlagsMask: 0) + + try super.init(description: &mixerDefaultDesc) + + self.mixerComponentDescription = mixerDefaultDesc + } + + func update(inputCallback: inout AURenderCallbackStruct, bus: UInt8) throws { + let status = AudioUnitSetProperty(audioUnit, + kAudioUnitProperty_SetRenderCallback, + 
kAudioUnitScope_Input, + UInt32(bus), + &inputCallback, + UInt32(MemoryLayout.size)) + guard status == noErr else { + throw Error.unableToUpdateBus(status) + } + } + + func enable(bus: UInt8, scope: AudioNode.BusScope, isEnabled: Bool) throws { + let value: AudioUnitParameterValue = isEnabled ? 1 : 0 + let status = AudioUnitSetParameter(audioUnit, + kMultiChannelMixerParam_Enable, + scope.audioUnitScope, + UInt32(bus), + value, + 0) + guard status == noErr else { + throw Error.unableToUpdateBus(status) + } + } + + func isEnabled(bus: UInt8, scope: AudioNode.BusScope) throws -> Bool { + var value: AudioUnitParameterValue = 0 + let status = AudioUnitGetParameter(audioUnit, + kMultiChannelMixerParam_Enable, + scope.audioUnitScope, + UInt32(bus), + &value) + guard status == noErr else { + throw Error.unableToRetrieveValue(status) + } + return value != 0 + } + + func update(volume: Float, bus: UInt8, scope: AudioNode.BusScope) throws { + let value: AudioUnitParameterValue = max(0, min(1, volume)) + let status = AudioUnitSetParameter(audioUnit, + kMultiChannelMixerParam_Volume, + scope.audioUnitScope, + UInt32(bus), + value, + 0) + guard status == noErr else { + throw Error.unableToUpdateBus(status) + } + } + + func volume(bus: UInt8, of scope: AudioNode.BusScope) throws -> Float { + var value: AudioUnitParameterValue = 0 + let status = AudioUnitGetParameter(audioUnit, + kMultiChannelMixerParam_Volume, + scope.audioUnitScope, + UInt32(bus), + &value) + guard status == noErr else { + throw Error.unableToUpdateBus(status) + } + return value + } +} + +final class OutputNode: AudioNode { + enum Error: Swift.Error { + case unableToRenderFrames + case unableToAllocateBuffer + } + + private var outputComponentDescription = AudioComponentDescription( + componentType: kAudioUnitType_Output, + componentSubType: kAudioUnitSubType_GenericOutput, + componentManufacturer: kAudioUnitManufacturer_Apple, + componentFlags: 0, + componentFlagsMask: 0) + + var format: AVAudioFormat { + 
buffer.format + } + private let buffer: AVAudioPCMBuffer + private var timeStamp: AudioTimeStamp = { + var timestamp = AudioTimeStamp() + timestamp.mFlags = .sampleTimeValid + return timestamp + }() + + init(format: AVAudioFormat) throws { + guard let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: 1024) else { + throw Error.unableToAllocateBuffer + } + self.buffer = buffer + + var outputDefaultDesc = AudioComponentDescription( + componentType: kAudioUnitType_Output, + componentSubType: kAudioUnitSubType_GenericOutput, + componentManufacturer: kAudioUnitManufacturer_Apple, + componentFlags: 0, + componentFlagsMask: 0) + + try super.init(description: &outputDefaultDesc) + + self.outputComponentDescription = outputDefaultDesc + } + + func render(numberOfFrames: AVAudioFrameCount, + sampleTime: AVAudioFramePosition) throws -> AVAudioPCMBuffer { + timeStamp.mSampleTime = Float64(sampleTime) + buffer.frameLength = numberOfFrames + let status = AudioUnitRender(audioUnit, + nil, + &timeStamp, + 0, + numberOfFrames, + buffer.mutableAudioBufferList) + guard status == noErr else { + throw Error.unableToRenderFrames + } + return buffer + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioRingBuffer.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioRingBuffer.swift new file mode 100644 index 000000000..030617050 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/AudioRingBuffer.swift @@ -0,0 +1,229 @@ +import Accelerate +import AVFoundation +import CoreAudio +import CoreMedia +import Foundation + +final class AudioRingBuffer { + private static let bufferCounts: UInt32 = 16 + private static let numSamples: UInt32 = 1024 + + var counts: Int { + if tail <= head { + return head - tail + skip + } + return Int(outputBuffer.frameLength) - tail + head + skip + } + + private var head = 0 + private var tail = 0 + private var skip = 0 + private var sampleTime: AVAudioFramePosition = 0 + private var inputFormat: AVAudioFormat 
+ private var inputBuffer: AVAudioPCMBuffer + private var outputBuffer: AVAudioPCMBuffer + + init?(_ inputFormat: AVAudioFormat, bufferCounts: UInt32 = AudioRingBuffer.bufferCounts) { + guard + let inputBuffer = AVAudioPCMBuffer(pcmFormat: inputFormat, frameCapacity: Self.numSamples) else { + return nil + } + guard let outputBuffer = AVAudioPCMBuffer(pcmFormat: inputFormat, frameCapacity: Self.numSamples * bufferCounts) else { + return nil + } + self.inputFormat = inputFormat + self.inputBuffer = inputBuffer + self.outputBuffer = outputBuffer + self.outputBuffer.frameLength = self.outputBuffer.frameCapacity + } + + func isDataAvailable(_ inNumberFrames: UInt32) -> Bool { + return inNumberFrames <= counts + } + + func append(_ sampleBuffer: CMSampleBuffer) { + guard CMSampleBufferDataIsReady(sampleBuffer) else { + return + } + let targetSampleTime: CMTimeValue + if sampleBuffer.presentationTimeStamp.timescale == Int32(inputBuffer.format.sampleRate) { + targetSampleTime = sampleBuffer.presentationTimeStamp.value + } else { + targetSampleTime = Int64(Double(sampleBuffer.presentationTimeStamp.value) * inputBuffer.format.sampleRate / Double(sampleBuffer.presentationTimeStamp.timescale)) + } + if sampleTime == 0 { + sampleTime = targetSampleTime + } + if outputBuffer.frameLength < sampleBuffer.numSamples { + skip += sampleBuffer.numSamples + return + } + if inputBuffer.frameLength < sampleBuffer.numSamples { + if let buffer = AVAudioPCMBuffer(pcmFormat: inputFormat, frameCapacity: AVAudioFrameCount(sampleBuffer.numSamples)) { + self.inputBuffer = buffer + } + } + inputBuffer.frameLength = AVAudioFrameCount(sampleBuffer.numSamples) + let status = CMSampleBufferCopyPCMDataIntoAudioBufferList( + sampleBuffer, + at: 0, + frameCount: Int32(sampleBuffer.numSamples), + into: inputBuffer.mutableAudioBufferList + ) + if status == noErr && kLinearPCMFormatFlagIsBigEndian == ((sampleBuffer.formatDescription?.audioStreamBasicDescription?.mFormatFlags ?? 
0) & kLinearPCMFormatFlagIsBigEndian) { + if inputFormat.isInterleaved { + switch inputFormat.commonFormat { + case .pcmFormatInt16: + let length = sampleBuffer.dataBuffer?.dataLength ?? 0 + var image = vImage_Buffer(data: inputBuffer.mutableAudioBufferList[0].mBuffers.mData, height: 1, width: vImagePixelCount(length / 2), rowBytes: length) + vImageByteSwap_Planar16U(&image, &image, vImage_Flags(kvImageNoFlags)) + default: + break + } + } + } + skip = max(Int(targetSampleTime - sampleTime), 0) + sampleTime += Int64(skip) + append(inputBuffer) + } + + func append(_ audioPCMBuffer: AVAudioPCMBuffer, when: AVAudioTime) { + if sampleTime == 0 { + sampleTime = when.sampleTime + } + if inputBuffer.frameLength < audioPCMBuffer.frameLength { + if let buffer = AVAudioPCMBuffer(pcmFormat: inputFormat, frameCapacity: audioPCMBuffer.frameCapacity) { + self.inputBuffer = buffer + } + } + inputBuffer.frameLength = audioPCMBuffer.frameLength + _ = inputBuffer.copy(audioPCMBuffer) + skip = Int(max(when.sampleTime - sampleTime, 0)) + sampleTime += Int64(skip) + append(inputBuffer) + } + + func render(_ inNumberFrames: UInt32, ioData: UnsafeMutablePointer?, offset: Int = 0) -> OSStatus { + if 0 < skip { + let numSamples = min(Int(inNumberFrames), skip) + guard let bufferList = UnsafeMutableAudioBufferListPointer(ioData) else { + return -1 + } + if inputFormat.isInterleaved { + let channelCount = Int(inputFormat.channelCount) + switch inputFormat.commonFormat { + case .pcmFormatInt16: + bufferList[0].mData?.assumingMemoryBound(to: Int16.self).advanced(by: offset * channelCount).update(repeating: 0, count: numSamples) + case .pcmFormatInt32: + bufferList[0].mData?.assumingMemoryBound(to: Int32.self).advanced(by: offset * channelCount).update(repeating: 0, count: numSamples) + case .pcmFormatFloat32: + bufferList[0].mData?.assumingMemoryBound(to: Float32.self).advanced(by: offset * channelCount).update(repeating: 0, count: numSamples) + default: + break + } + } else { + for i in 0.. 
CMClock? { + if #available(macOS 12.3, *) { + return session.synchronizationClock + } else { + return session.masterClock + } + } + + func makeSession(_ sessionPreset: AVCaptureSession.Preset) -> AVCaptureSession { + let session = AVCaptureSession() + if session.canSetSessionPreset(sessionPreset) { + session.sessionPreset = sessionPreset + } + return session + } + + func isMultitaskingCameraAccessEnabled(_ session: AVCaptureSession) -> Bool { + false + } + } + #elseif os(iOS) || os(tvOS) + struct Capabilities { + static var isMultiCamSupported: Bool { + if #available(tvOS 17.0, *) { + return AVCaptureMultiCamSession.isMultiCamSupported + } else { + return false + } + } + + var isMultiCamSessionEnabled = false { + didSet { + if !Self.isMultiCamSupported { + isMultiCamSessionEnabled = false + logger.info("This device can't support the AVCaptureMultiCamSession.") + } + } + } + + #if os(iOS) + func synchronizationClock(_ session: AVCaptureSession) -> CMClock? { + if #available(iOS 15.4, *) { + return session.synchronizationClock + } else { + return session.masterClock + } + } + #endif + + @available(tvOS 17.0, *) + func isMultitaskingCameraAccessEnabled(_ session: AVCaptureSession) -> Bool { + if #available(iOS 16.0, tvOS 17.0, *) { + session.isMultitaskingCameraAccessEnabled + } else { + false + } + } + + @available(tvOS 17.0, *) + func makeSession(_ sessionPreset: AVCaptureSession.Preset) -> AVCaptureSession { + let session: AVCaptureSession + if isMultiCamSessionEnabled { + session = AVCaptureMultiCamSession() + } else { + session = AVCaptureSession() + } + if session.canSetSessionPreset(sessionPreset) { + session.sessionPreset = sessionPreset + } + return session + } + } + #else + struct Capabilities { + static let isMultiCamSupported = false + + var isMultiCamSessionEnabled = false { + didSet { + isMultiCamSessionEnabled = false + } + } + + func synchronizationClock(_ session: AVCaptureSession) -> CMClock? 
{ + return session.synchronizationClock + } + + func isMultitaskingCameraAccessEnabled(_ session: AVCaptureSession) -> Bool { + false + } + } + #endif +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/CaptureSession.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/CaptureSession.swift new file mode 100644 index 000000000..b8f7be12d --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/CaptureSession.swift @@ -0,0 +1,365 @@ +import AVFoundation + +protocol CaptureSessionConvertible: Runner { + #if !os(visionOS) + @available(tvOS 17.0, *) + var sessionPreset: AVCaptureSession.Preset { get set } + #endif + + var isCapturing: Bool { get } + var isInturreped: AsyncStream { get } + var runtimeError: AsyncStream { get } + var synchronizationClock: CMClock? { get } + var isMultiCamSessionEnabled: Bool { get set } + + @available(tvOS 17.0, *) + var isMultitaskingCameraAccessEnabled: Bool { get } + + @available(tvOS 17.0, *) + func attachCapture(_ capture: (any DeviceUnit)?) + @available(tvOS 17.0, *) + func detachCapture(_ capture: (any DeviceUnit)?) + @available(tvOS 17.0, *) + func configuration(_ lambda: (_ session: AVCaptureSession) throws -> Void) rethrows + @available(tvOS 17.0, *) + func startRunningIfNeeded() +} + +#if os(macOS) || os(iOS) || os(visionOS) +final class CaptureSession { + var isMultiCamSessionEnabled: Bool { + get { + capabilities.isMultiCamSessionEnabled + } + set { + capabilities.isMultiCamSessionEnabled = newValue + } + } + + var isCapturing: Bool { + session.isRunning + } + + var isMultitaskingCameraAccessEnabled: Bool { + capabilities.isMultitaskingCameraAccessEnabled(session) + } + + @AsyncStreamedFlow + var isInturreped: AsyncStream + + @AsyncStreamedFlow + var runtimeError: AsyncStream + + var synchronizationClock: CMClock? 
{ + capabilities.synchronizationClock(session) + } + + private(set) var isRunning = false + + #if !os(visionOS) + var sessionPreset: AVCaptureSession.Preset = .default { + didSet { + guard sessionPreset != oldValue, session.canSetSessionPreset(sessionPreset) else { + return + } + session.beginConfiguration() + session.sessionPreset = sessionPreset + session.commitConfiguration() + } + } + private(set) lazy var session: AVCaptureSession = capabilities.makeSession(sessionPreset) + #else + private(set) lazy var session = AVCaptureSession() + #endif + + private lazy var capabilities = Capabilities() + + deinit { + if session.isRunning { + session.stopRunning() + } + } +} +#elseif os(tvOS) +final class CaptureSession { + var isMultiCamSessionEnabled: Bool { + get { + capabilities.isMultiCamSessionEnabled + } + set { + capabilities.isMultiCamSessionEnabled = newValue + } + } + + var isCapturing: Bool { + if #available(tvOS 17.0, *) { + session.isRunning + } else { + false + } + } + + var isMultitaskingCameraAccessEnabled: Bool { + if #available(tvOS 17.0, *) { + capabilities.isMultitaskingCameraAccessEnabled(session) + } else { + false + } + } + + @AsyncStreamedFlow + var isInturreped: AsyncStream + + @AsyncStreamedFlow + var runtimeError: AsyncStream + + var synchronizationClock: CMClock? { + if #available(tvOS 17.0, *) { + return session.synchronizationClock + } else { + return nil + } + } + + private(set) var isRunning = false + + private var _session: Any? + /// The capture session instance. + @available(tvOS 17.0, *) + var session: AVCaptureSession { + if _session == nil { + _session = capabilities.makeSession(sessionPreset) + } + return _session as! AVCaptureSession + } + + private var _sessionPreset: Any? + @available(tvOS 17.0, *) + var sessionPreset: AVCaptureSession.Preset { + get { + if _sessionPreset == nil { + _sessionPreset = AVCaptureSession.Preset.default + } + return _sessionPreset as! 
AVCaptureSession.Preset + } + set { + guard sessionPreset != newValue, session.canSetSessionPreset(newValue) else { + return + } + session.beginConfiguration() + session.sessionPreset = newValue + session.commitConfiguration() + } + } + + private lazy var capabilities = Capabilities() + + deinit { + guard #available(tvOS 17.0, *) else { + return + } + if session.isRunning { + session.stopRunning() + } + } +} +#endif + +extension CaptureSession: CaptureSessionConvertible { + // MARK: CaptureSessionConvertible + @available(tvOS 17.0, *) + func configuration(_ lambda: (_ session: AVCaptureSession) throws -> Void) rethrows { + session.beginConfiguration() + defer { + session.commitConfiguration() + } + try lambda(session) + } + + @available(tvOS 17.0, *) + func attachCapture(_ capture: (any DeviceUnit)?) { + guard let capture else { + return + } + #if !os(visionOS) + if let connection = capture.connection { + if let input = capture.input, session.canAddInput(input) { + session.addInputWithNoConnections(input) + } + if let output = capture.output, session.canAddOutput(output) { + session.addOutputWithNoConnections(output) + } + if session.canAddConnection(connection) { + session.addConnection(connection) + } + return + } + #endif + if let input = capture.input, session.canAddInput(input) { + session.addInput(input) + } + if let output = capture.output, session.canAddOutput(output) { + session.addOutput(output) + } + } + + @available(tvOS 17.0, *) + func detachCapture(_ capture: (any DeviceUnit)?) 
{ + guard let capture else { + return + } + #if !os(visionOS) + if let connection = capture.connection { + if capture.output?.connections.contains(connection) == true { + session.removeConnection(connection) + } + } + #endif + if let input = capture.input, session.inputs.contains(input) { + session.removeInput(input) + } + if let output = capture.output, session.outputs.contains(output) { + session.removeOutput(output) + } + } + + @available(tvOS 17.0, *) + func startRunningIfNeeded() { + guard isRunning && !session.isRunning else { + return + } + session.startRunning() + isRunning = session.isRunning + } + + @available(tvOS 17.0, *) + private func addSessionObservers(_ session: AVCaptureSession) { + NotificationCenter.default.addObserver(self, selector: #selector(sessionRuntimeError(_:)), name: .AVCaptureSessionRuntimeError, object: session) + #if os(iOS) || os(tvOS) || os(visionOS) + NotificationCenter.default.addObserver(self, selector: #selector(sessionInterruptionEnded(_:)), name: .AVCaptureSessionInterruptionEnded, object: session) + NotificationCenter.default.addObserver(self, selector: #selector(sessionWasInterrupted(_:)), name: .AVCaptureSessionWasInterrupted, object: session) + #endif + } + + @available(tvOS 17.0, *) + private func removeSessionObservers(_ session: AVCaptureSession) { + #if os(iOS) || os(tvOS) || os(visionOS) + NotificationCenter.default.removeObserver(self, name: .AVCaptureSessionWasInterrupted, object: session) + NotificationCenter.default.removeObserver(self, name: .AVCaptureSessionInterruptionEnded, object: session) + #endif + NotificationCenter.default.removeObserver(self, name: .AVCaptureSessionRuntimeError, object: session) + _runtimeError.finish() + } + + @available(tvOS 17.0, *) + @objc + private func sessionRuntimeError(_ notification: NSNotification) { + guard + let errorValue = notification.userInfo?[AVCaptureSessionErrorKey] as? 
NSError else { + return + } + _runtimeError.yield(AVError(_nsError: errorValue)) + } + + #if os(iOS) || os(tvOS) || os(visionOS) + @available(tvOS 17.0, *) + @objc + private func sessionWasInterrupted(_ notification: Notification) { + _isInturreped.yield(true) + } + + @available(tvOS 17.0, *) + @objc + private func sessionInterruptionEnded(_ notification: Notification) { + _isInturreped.yield(false) + } + #endif +} + +extension CaptureSession: Runner { + // MARK: Runner + func startRunning() { + guard !isRunning else { + return + } + if #available(tvOS 17.0, *) { + addSessionObservers(session) + session.startRunning() + isRunning = session.isRunning + } else { + isRunning = true + } + } + + func stopRunning() { + guard isRunning else { + return + } + if #available(tvOS 17.0, *) { + removeSessionObservers(session) + session.stopRunning() + isRunning = session.isRunning + } else { + isRunning = false + } + } +} + +final class NullCaptureSession: CaptureSessionConvertible { + #if !os(visionOS) + @available(tvOS 17.0, *) + var sessionPreset: AVCaptureSession.Preset { + get { + return .default + } + set { + } + } + #endif + + let isCapturing: Bool = false + var isMultiCamSessionEnabled = false + let isMultitaskingCameraAccessEnabled = false + let synchronizationClock: CMClock? = nil + + @AsyncStreamed(false) + var isInturreped: AsyncStream + + @AsyncStreamedFlow + var runtimeError: AsyncStream + + private(set) var isRunning = false + + @available(tvOS 17.0, *) + func attachCapture(_ capture: (any DeviceUnit)?) { + } + + @available(tvOS 17.0, *) + func detachCapture(_ capture: (any DeviceUnit)?) 
{ + } + + @available(tvOS 17.0, *) + func configuration(_ lambda: (AVCaptureSession) throws -> Void) rethrows { + } + + func startRunningIfNeeded() { + } +} + +extension NullCaptureSession: Runner { + // MARK: Runner + func startRunning() { + guard !isRunning else { + return + } + isRunning = true + } + + func stopRunning() { + guard isRunning else { + return + } + _runtimeError.finish() + isRunning = false + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/CaptureUnit.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/CaptureUnit.swift new file mode 100644 index 000000000..6a959f7ee --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/CaptureUnit.swift @@ -0,0 +1,15 @@ +import AVFAudio +import Foundation + +protocol CaptureUnit { + var lockQueue: DispatchQueue { get } + var isSuspended: Bool { get } + + @available(tvOS 17.0, *) + func suspend() + + @available(tvOS 17.0, *) + func resume() + + func finish() +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/DeviceUnit.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/DeviceUnit.swift new file mode 100644 index 000000000..1d6aa414a --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/DeviceUnit.swift @@ -0,0 +1,15 @@ +import AVFoundation +import Foundation + +@available(tvOS 17.0, *) +protocol DeviceUnit { + associatedtype Output: AVCaptureOutput + + var track: UInt8 { get } + var input: AVCaptureInput? { get } + var output: Output? { get } + var device: AVCaptureDevice? { get } + var connection: AVCaptureConnection? 
{ get } + + init(_ track: UInt8, device: AVCaptureDevice) throws +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/DynamicRangeMode.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/DynamicRangeMode.swift new file mode 100644 index 000000000..ccd90c2aa --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/DynamicRangeMode.swift @@ -0,0 +1,108 @@ +import AVFoundation +import CoreImage + +/// Defines the dynamic range mode used for rendering or video processing. +/// +/// - Note: Live streaming is **not yet supported** when using HDR mode. +public enum DynamicRangeMode: Sendable { + private static let colorSpaceITUR709 = CGColorSpace(name: CGColorSpace.itur_709) + private static let colorSpaceITUR2100 = CGColorSpace(name: CGColorSpace.itur_2100_HLG) + + /// Standard Dynamic Range (SDR) mode. + /// Uses the sRGB color space and standard luminance range. + case sdr + + /// High Dynamic Range (HDR) mode. + /// Uses the ITU-R BT.2100 HLG color space for wide color gamut and extended brightness. + case hdr + + var videoFormat: OSType { + switch self { + case .sdr: + return kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange + case .hdr: + return kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange + } + } + + var colorSpace: CGColorSpace? { + switch self { + case .sdr: + return DynamicRangeMode.colorSpaceITUR709 + case .hdr: + return DynamicRangeMode.colorSpaceITUR2100 + } + } + + private var contextOptions: [CIContextOption: Any]? 
{ + guard let colorSpace else { + return nil + } + return [ + .workingFormat: CIFormat.RGBAh.rawValue, + .workingColorSpace: colorSpace, + .outputColorSpace: colorSpace + ] + } + + private var pixelFormat: OSType { + switch self { + case .sdr: + return kCVPixelFormatType_32ARGB + case .hdr: + return kCVPixelFormatType_64RGBAHalf + } + } + + func attach(_ pixelBuffer: CVPixelBuffer) { + switch self { + case .sdr: + break + case .hdr: + CVBufferSetAttachment( + pixelBuffer, + kCVImageBufferColorPrimariesKey, + kCVImageBufferColorPrimaries_ITU_R_2020, + .shouldPropagate + ) + CVBufferSetAttachment( + pixelBuffer, + kCVImageBufferTransferFunctionKey, + kCVImageBufferTransferFunction_ITU_R_2100_HLG, + .shouldPropagate + ) + CVBufferSetAttachment( + pixelBuffer, + kCVImageBufferYCbCrMatrixKey, + kCVImageBufferYCbCrMatrix_ITU_R_2020, + .shouldPropagate + ) + } + } + + func makeCIContext() -> CIContext { + guard let device = MTLCreateSystemDefaultDevice() else { + return CIContext(options: contextOptions) + } + return CIContext(mtlDevice: device, options: contextOptions) + } + + func makePixelBufferAttributes(_ size: CGSize) -> CFDictionary { + switch self { + case .sdr: + return [ + kCVPixelBufferPixelFormatTypeKey: NSNumber(value: pixelFormat), + kCVPixelBufferMetalCompatibilityKey: kCFBooleanTrue, + kCVPixelBufferWidthKey: NSNumber(value: Int(size.width)), + kCVPixelBufferHeightKey: NSNumber(value: Int(size.height)) + ] as CFDictionary + case .hdr: + return [ + kCVPixelBufferPixelFormatTypeKey: NSNumber(value: videoFormat), + kCVPixelBufferMetalCompatibilityKey: kCFBooleanTrue, + kCVPixelBufferWidthKey: NSNumber(value: Int(size.width)), + kCVPixelBufferHeightKey: NSNumber(value: Int(size.height)) + ] as CFDictionary + } + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/MediaMixer.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/MediaMixer.swift new file mode 100644 index 000000000..707cb65c7 --- /dev/null +++ 
b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/MediaMixer.swift @@ -0,0 +1,567 @@ +@preconcurrency import AVFoundation + +#if canImport(UIKit) +import UIKit +#endif + +/// An actor that mixies audio and video for streaming. +public final actor MediaMixer { + static let defaultFrameRate: Float64 = 30 + + /// The error domain codes. + public enum Error: Swift.Error { + /// The mixer failed to failed to attach device. + case failedToAttach(_ error: any Swift.Error) + /// The mixer missing a device of track. + case deviceNotFound + } + + /// An enumeration defines the capture session mode used for video/audio input. + public enum CaptureSessionMode: Sendable { + /// Uses a standard `AVCaptureSession` + case single + /// Uses an `AVCaptureMultiCamSession` + case multi + /// Does not use a `AVCaptureSession`. Set this when using ReplayKit, as AVCaptureSession is not required. + case manual + + func makeSession() -> (any CaptureSessionConvertible) { + switch self { + case .single: + let session = CaptureSession() + session.isMultiCamSessionEnabled = false + return session + case .multi: + let session = CaptureSession() + session.isMultiCamSessionEnabled = true + return session + case .manual: + return NullCaptureSession() + } + } + } + + /// The offscreen rendering object. + @ScreenActor + public private(set) lazy var screen = Screen() + + /// The capture session mode. + public let captureSessionMode: CaptureSessionMode + + /// The feature to mix multiple audio tracks. For example, it is possible to mix .appAudio and .micAudio from ReplayKit. + public let isMultiTrackAudioMixingEnabled: Bool + + /// The dynamic range mode. + public private(set) var dynamicRangeMode: DynamicRangeMode = .sdr + + #if os(iOS) || os(tvOS) + /// The AVCaptureMultiCamSession enabled. 
+ @available(tvOS 17.0, *) + public var isMultiCamSessionEnabled: Bool { + session.isMultiCamSessionEnabled + } + #endif + + #if os(iOS) || os(macOS) || os(tvOS) + /// The device torch indicating wheter the turn on(TRUE) or not(FALSE). + public var isTorchEnabled: Bool { + videoIO.isTorchEnabled + } + + /// The sessionPreset for the AVCaptureSession. + @available(tvOS 17.0, *) + public var sessionPreset: AVCaptureSession.Preset { + session.sessionPreset + } + #endif + + /// The audio monitoring enabled or not. + public var isMonitoringEnabled: Bool { + audioIO.isMonitoringEnabled + } + + /// The audio mixer settings. + public var audioMixerSettings: AudioMixerSettings { + audioIO.mixerSettings + } + + /// The video mixer settings. + public var videoMixerSettings: VideoMixerSettings { + videoIO.mixerSettings + } + + /// The audio input formats. + public var audioInputFormats: [UInt8: AVAudioFormat] { + audioIO.inputFormats + } + + /// The video input formats. + public var videoInputFormats: [UInt8: CMFormatDescription] { + videoIO.inputFormats + } + + /// The output frame rate. + public private(set) var frameRate = MediaMixer.defaultFrameRate + + /// The AVCaptureSession is in a running state or not. + @available(tvOS 17.0, *) + public var isCapturing: Bool { + session.isCapturing + } + + /// The interrupts events is occured or not. + public var isInterputted: AsyncStream { + session.isInturreped + } + + #if os(iOS) || os(macOS) + /// The video orientation for stream. 
+ public var videoOrientation: AVCaptureVideoOrientation { + videoIO.videoOrientation + } + #endif + + public private(set) var isRunning = false + + private var outputs: [any MediaMixerOutput] = [] + private var subscriptions: [Task] = [] + private var isInBackground = false + private lazy var audioIO = AudioCaptureUnit(session, isMultiTrackAudioMixingEnabled: isMultiTrackAudioMixingEnabled) + private lazy var videoIO = VideoCaptureUnit(session) + private lazy var session: (any CaptureSessionConvertible) = captureSessionMode.makeSession() + @ScreenActor + private lazy var displayLink = DisplayLinkChoreographer() + + /// Creates a new instance. + /// + /// - Parameters: + /// - captureSessionMode: Specifies the capture session mode. + /// - multiTrackAudioMixingEnabled: Specifies the feature to mix multiple audio tracks. For example, it is possible to mix .appAudio and .micAudio from ReplayKit. + public init( + captureSessionMode: CaptureSessionMode = .single, + multiTrackAudioMixingEnabled: Bool = false + ) { + self.captureSessionMode = captureSessionMode + self.isMultiTrackAudioMixingEnabled = multiTrackAudioMixingEnabled + } + + /// Attaches a video device. + /// + /// If you want to use the multi-camera feature, please make create a MediaMixer with a multiCamSession mode for iOS. + /// ```swift + /// let mixer = MediaMixer(captureSessionMode: .multi) + /// ``` + @available(tvOS 17.0, *) + public func attachVideo(_ device: AVCaptureDevice?, track: UInt8 = 0, configuration: VideoDeviceConfigurationBlock? = nil) async throws { + return try await withCheckedThrowingContinuation { continuation in + do { + try videoIO.attachVideo(track, device: device, configuration: configuration) + continuation.resume() + } catch { + continuation.resume(throwing: Error.failedToAttach(error)) + } + } + } + + /// Configurations for a video device. 
+ @available(tvOS 17.0, *) + public func configuration(video track: UInt8, configuration: VideoDeviceConfigurationBlock) throws { + guard let unit = videoIO.devices[track] else { + throw Error.deviceNotFound + } + try configuration(unit) + } + + #if os(iOS) || os(macOS) || os(tvOS) + /// Attaches an audio device. + /// + /// - Attention: You can perform multi-microphone capture by specifying as follows on macOS. Unfortunately, it seems that only one microphone is available on iOS. + /// + /// ```swift + /// let mixer = MediaMixer(multiTrackAudioMixingEnabled: true) + /// + /// var audios = AVCaptureDevice.devices(for: .audio) + /// if let device = audios.removeFirst() { + /// mixer.attachAudio(device, track: 0) + /// } + /// if let device = audios.removeFirst() { + /// mixer.attachAudio(device, track: 1) + /// } + /// ``` + @available(tvOS 17.0, *) + public func attachAudio(_ device: AVCaptureDevice?, track: UInt8 = 0, configuration: AudioDeviceConfigurationBlock? = nil) async throws { + return try await withCheckedThrowingContinuation { continuation in + do { + try audioIO.attachAudio(track, device: device, configuration: configuration) + continuation.resume() + } catch { + continuation.resume(throwing: Error.failedToAttach(error)) + } + } + } + + /// Configurations for an audio device. + @available(tvOS 17.0, *) + public func configuration(audio track: UInt8, configuration: AudioDeviceConfigurationBlock) throws { + guard let unit = audioIO.devices[track] else { + throw Error.deviceNotFound + } + try configuration(unit) + } + + /// Sets the device torch indicating wheter the turn on(TRUE) or not(FALSE). + public func setTorchEnabled(_ torchEnabled: Bool) { + videoIO.isTorchEnabled = torchEnabled + } + + /// Sets the sessionPreset for the AVCaptureSession. 
+ @available(tvOS 17.0, *) + public func setSessionPreset(_ sessionPreset: AVCaptureSession.Preset) { + session.sessionPreset = sessionPreset + } + #endif + + #if os(iOS) || os(macOS) + /// Sets the video orientation for stream. + public func setVideoOrientation(_ videoOrientation: AVCaptureVideoOrientation) { + videoIO.videoOrientation = videoOrientation + // https://github.com/shogo4405/HaishinKit.swift/issues/190 + if videoIO.isTorchEnabled { + videoIO.isTorchEnabled = true + } + } + #endif + + /// Appends a CMSampleBuffer. + /// - Parameters: + /// - sampleBuffer:The sample buffer to append. + /// - track: Track number used for mixing + public func append(_ sampleBuffer: CMSampleBuffer, track: UInt8 = 0) { + switch sampleBuffer.formatDescription?.mediaType { + case .audio?: + audioIO.append(track, buffer: sampleBuffer) + case .video?: + videoIO.append(track, buffer: sampleBuffer) + default: + break + } + } + + /// Sets the video mixier settings. + public func setVideoMixerSettings(_ settings: VideoMixerSettings) { + let mode = self.videoMixerSettings.mode + if mode != settings.mode { + setVideoRenderingMode(settings.mode) + } + videoIO.mixerSettings = settings + Task { @ScreenActor in + screen.videoTrackScreenObject.track = settings.mainTrack + } + } + + /// Sets the output frame rate of the mixer. + /// + /// This is distinct from the camera capture rate, which can be configured separately as shown below. + /// ```swift + /// try? await mixer.configuration(video: 0) { video in + /// try? video.setFrameRate(fps) + /// } + /// ``` + public func setFrameRate(_ frameRate: Float64) throws { + switch videoMixerSettings.mode { + case .passthrough: + if #available(tvOS 17.0, *) { + try videoIO.devices.first?.value.setFrameRate(frameRate) + } + case .offscreen: + Task { @ScreenActor in + displayLink.preferredFramesPerSecond = Int(frameRate) + } + } + self.frameRate = frameRate + } + + /// Sets the dynamic range mode. 
+ /// + /// Warnings: It takes some time for changes to be applied to the camera device, so it’s better not to modify it dynamically during a live stream. + public func setDynamicRangeMode(_ dynamicRangeMode: DynamicRangeMode) throws { + guard self.dynamicRangeMode != dynamicRangeMode else { + return + } + Task { @ScreenActor in + screen.dynamicRangeMode = dynamicRangeMode + } + videoIO.dynamicRangeMode = dynamicRangeMode + self.dynamicRangeMode = dynamicRangeMode + } + + /// Sets the audio mixer settings. + public func setAudioMixerSettings(_ settings: AudioMixerSettings) { + audioIO.mixerSettings = settings + } + + /// Sets the audio monitoring enabled or not. + public func setMonitoringEnabled(_ monitoringEnabled: Bool) { + audioIO.isMonitoringEnabled = monitoringEnabled + } + + /// Starts capturing from input devices. + /// + /// Internally, it is called either when the view is attached or just before publishing. In other cases, please call this method if you want to manually start the capture. + @available(tvOS 17.0, *) + public func startCapturing() { + guard !session.isRunning else { + session.startRunningIfNeeded() + return + } + session.startRunning() + let synchronizationClock = session.synchronizationClock + Task { @ScreenActor in + screen.synchronizationClock = synchronizationClock + } + Task { + for await runtimeError in session.runtimeError { + await sessionRuntimeErrorOccured(runtimeError) + } + } + } + + /// Stops capturing from input devices. + @available(tvOS 17.0, *) + public func stopCapturing() { + guard session.isRunning else { + return + } + session.stopRunning() + Task { @ScreenActor in + screen.synchronizationClock = nil + } + } + + /// Appends an AVAudioBuffer. + /// - Parameters: + /// - audioBuffer:The audio buffer to append. + /// - when: The audio time to append. + /// - track: Track number used for mixing. 
+ public func append(_ audioBuffer: AVAudioBuffer, when: AVAudioTime, track: UInt8 = 0) { + audioIO.append(track, buffer: audioBuffer, when: when) + } + + /// Configurations for the AVCaptureSession. + /// - Attention: Internally, there is no need for developers to call beginConfiguration() and func commitConfiguration() as they are called automatically. + @available(tvOS 17.0, *) + public func configuration(_ lambda: @Sendable (_ session: AVCaptureSession) throws -> Void) rethrows { + try session.configuration(lambda) + } + + /// Adds an output observer. + public func addOutput(_ output: some MediaMixerOutput) { + guard !outputs.contains(where: { $0 === output }) else { + return + } + outputs.append(output) + } + + /// Removes an output observer. + public func removeOutput(_ output: some MediaMixerOutput) { + if let index = outputs.firstIndex(where: { $0 === output }) { + outputs.remove(at: index) + } + } + + private func setVideoRenderingMode(_ mode: VideoMixerSettings.Mode) { + guard isRunning else { + return + } + switch mode { + case .passthrough: + Task { @ScreenActor in + displayLink.stopRunning() + } + case .offscreen: + Task { @ScreenActor in + displayLink.preferredFramesPerSecond = await Int(frameRate) + displayLink.startRunning() + for await updateFrame in displayLink.updateFrames { + guard let buffer = screen.makeSampleBuffer(updateFrame) else { + continue + } + for output in await self.outputs where await output.videoTrackId == UInt8.max { + output.mixer(self, didOutput: buffer) + } + } + } + } + } + + #if os(iOS) || os(tvOS) || os(visionOS) + private func setInBackground(_ isInBackground: Bool) { + self.isInBackground = isInBackground + guard #available(tvOS 17.0, *), !session.isMultitaskingCameraAccessEnabled else { + return + } + if isInBackground { + videoIO.suspend() + } else { + videoIO.resume() + session.startRunningIfNeeded() + } + } + + @available(tvOS 17.0, *) + private func didAudioSessionInterruption(_ notification: Notification) { + guard 
+ let userInfo = notification.userInfo, + let typeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt, + let type = AVAudioSession.InterruptionType(rawValue: typeValue) else { + return + } + switch type { + case .began: + // video capture continues even while an incoming call is ringing. + audioIO.suspend() + session.startRunningIfNeeded() + logger.info("Audio suspended due to system interruption.") + case .ended: + let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt + let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue ?? 0) + if options.contains(.shouldResume) { + audioIO.resume() + } + logger.info("Audio resumed after system interruption") + default: () + } + } + #endif + + @available(tvOS 17.0, *) + private func sessionRuntimeErrorOccured(_ error: AVError) async { + switch error.code { + #if os(iOS) || os(tvOS) || os(visionOS) + case .mediaServicesWereReset: + session.startRunningIfNeeded() + #endif + #if os(iOS) || os(tvOS) || os(macOS) + case .unsupportedDeviceActiveFormat: + guard let device = error.device, let format = device.videoFormat( + width: session.sessionPreset.width ?? Int32.max, + height: session.sessionPreset.height ?? 
Int32.max, + frameRate: frameRate, + isMultiCamSupported: session.isMultiCamSessionEnabled + ), device.activeFormat != format else { + return + } + do { + try device.lockForConfiguration() + device.activeFormat = format + if format.isFrameRateSupported(frameRate) { + device.activeVideoMinFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * frameRate)) + device.activeVideoMaxFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * frameRate)) + } + device.unlockForConfiguration() + session.startRunningIfNeeded() + } catch { + logger.warn(error) + } + #endif + case .unknown: + // AVFoundationErrorDomain Code=-11800 "The operation could not be completed" + if error.errorCode == -11800 && !isInBackground { + session.startRunningIfNeeded() + } + default: + break + } + } +} + +extension MediaMixer: AsyncRunner { + // MARK: AsyncRunner + public func startRunning() async { + guard !isRunning else { + return + } + isRunning = true + setVideoRenderingMode(videoMixerSettings.mode) + if #available(tvOS 17.0, *) { + startCapturing() + } + Task { + for await inputs in videoIO.inputs { + Task { @ScreenActor in + let videoMixerSettings = await self.videoMixerSettings + guard videoMixerSettings.mode == .offscreen else { + return + } + let sampleBuffer = inputs.1 + screen.append(inputs.0, buffer: sampleBuffer) + if videoMixerSettings.mainTrack == inputs.0 { + screen.setVideoCaptureLatency(sampleBuffer.presentationTimeStamp) + } + } + for output in outputs where await output.videoTrackId == inputs.0 { + output.mixer(self, didOutput: inputs.1) + } + } + } + Task { + for await video in videoIO.output { + for output in outputs where await output.videoTrackId == UInt8.max { + output.mixer(self, didOutput: video) + } + } + } + Task { + for await audio in audioIO.output { + for output in outputs where await output.audioTrackId == UInt8.max { + output.mixer(self, didOutput: audio.0, when: audio.1) + } + } + } + #if os(iOS) || os(tvOS) || os(visionOS) + 
subscriptions.append(Task { + for await _ in NotificationCenter.default.notifications( + named: UIApplication.didEnterBackgroundNotification + ) { + setInBackground(true) + } + }) + subscriptions.append(Task { + for await _ in NotificationCenter.default.notifications( + named: UIApplication.willEnterForegroundNotification + ) { + setInBackground(false) + } + }) + if #available(tvOS 17.0, *) { + subscriptions.append(Task { + for await notification in NotificationCenter.default.notifications( + named: AVAudioSession.interruptionNotification, + object: AVAudioSession.sharedInstance() + ) { + didAudioSessionInterruption(notification) + } + }) + } + #endif + } + + public func stopRunning() async { + guard isRunning else { + return + } + if #available(tvOS 17.0, *) { + stopCapturing() + } + audioIO.finish() + videoIO.finish() + subscriptions.forEach { $0.cancel() } + subscriptions.removeAll() + // Wait for the task to finish to prevent memory leaks. + await Task { @ScreenActor in + displayLink.stopRunning() + screen.reset() + }.value + isRunning = false + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/MediaMixerOutput.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/MediaMixerOutput.swift new file mode 100644 index 000000000..a8681dc9d --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/MediaMixerOutput.swift @@ -0,0 +1,15 @@ +import AVFoundation + +/// A delegate protocol implements to receive stream output events. +public protocol MediaMixerOutput: AnyObject, Sendable { + /// Tells the receiver to a video track id. + var videoTrackId: UInt8? { get async } + /// Tells the receiver to an audio track id. + var audioTrackId: UInt8? { get async } + /// Tells the receiver to a video buffer incoming. + func mixer(_ mixer: MediaMixer, didOutput sampleBuffer: CMSampleBuffer) + /// Tells the receiver to an audio buffer incoming. 
+ func mixer(_ mixer: MediaMixer, didOutput buffer: AVAudioPCMBuffer, when: AVAudioTime) + /// Selects track id for streaming. + func selectTrack(_ id: UInt8?, mediaType: CMFormatDescription.MediaType) async +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/VideoCaptureUnit.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/VideoCaptureUnit.swift new file mode 100644 index 000000000..4f298c308 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/VideoCaptureUnit.swift @@ -0,0 +1,182 @@ +import AVFoundation +import CoreImage + +final class VideoCaptureUnit: CaptureUnit { + enum Error: Swift.Error { + case multiCamNotSupported + } + + let lockQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.VideoCaptureUnit.lock") + + private(set) var isSuspended = false + + var mixerSettings: VideoMixerSettings { + get { + return videoMixer.settings + } + set { + videoMixer.settings = newValue + } + } + + var inputFormats: [UInt8: CMFormatDescription] { + return videoMixer.inputFormats + } + + #if os(iOS) || os(tvOS) || os(macOS) + var isTorchEnabled = false { + didSet { + guard #available(tvOS 17.0, *) else { + return + } + setTorchMode(isTorchEnabled ? .on : .off) + } + } + #endif + + @available(tvOS 17.0, *) + var hasDevice: Bool { + !devices.lazy.filter { $0.value.device != nil }.isEmpty + } + + #if os(iOS) || os(macOS) + var videoOrientation: AVCaptureVideoOrientation = .portrait { + didSet { + guard videoOrientation != oldValue else { + return + } + session.configuration { _ in + for capture in devices.values { + capture.videoOrientation = videoOrientation + } + } + } + } + #endif + + @AsyncStreamedFlow + var inputs: AsyncStream<(UInt8, CMSampleBuffer)> + + @AsyncStreamedFlow + var output: AsyncStream + + var dynamicRangeMode: DynamicRangeMode = .sdr { + didSet { + guard dynamicRangeMode != oldValue, #available(tvOS 17.0, *) else { + return + } + try? 
session.configuration { _ in + for capture in devices.values { + try capture.setDynamicRangeMode(dynamicRangeMode) + } + } + } + } + + private lazy var videoMixer = { + var videoMixer = VideoMixer() + videoMixer.delegate = self + return videoMixer + }() + + #if os(tvOS) + private var _devices: [UInt8: Any] = [:] + @available(tvOS 17.0, *) + var devices: [UInt8: VideoDeviceUnit] { + get { + _devices as! [UInt8: VideoDeviceUnit] + } + set { + _devices = newValue + } + } + #elseif os(iOS) || os(macOS) || os(visionOS) + var devices: [UInt8: VideoDeviceUnit] = [:] + #endif + + private let session: (any CaptureSessionConvertible) + + init(_ session: (some CaptureSessionConvertible)) { + self.session = session + } + + func append(_ track: UInt8, buffer: CMSampleBuffer) { + videoMixer.append(track, sampleBuffer: buffer) + } + + @available(tvOS 17.0, *) + func attachVideo(_ track: UInt8, device: AVCaptureDevice?, configuration: VideoDeviceConfigurationBlock?) throws { + try session.configuration { _ in + session.detachCapture(devices[track]) + videoMixer.reset(track) + devices[track] = nil + if let device { + if hasDevice && session.isMultiCamSessionEnabled == false { + throw Error.multiCamNotSupported + } + let capture = try VideoDeviceUnit(track, device: device) + try? capture.setDynamicRangeMode(dynamicRangeMode) + #if os(iOS) || os(macOS) + capture.videoOrientation = videoOrientation + #endif + capture.setSampleBufferDelegate(self) + try? 
configuration?(capture) + session.attachCapture(capture) + capture.apply() + devices[track] = capture + } + } + } + + #if os(iOS) || os(tvOS) || os(macOS) + @available(tvOS 17.0, *) + func setTorchMode(_ torchMode: AVCaptureDevice.TorchMode) { + for capture in devices.values { + capture.setTorchMode(torchMode) + } + } + #endif + + @available(tvOS 17.0, *) + func makeDataOutput(_ track: UInt8) -> VideoCaptureUnitDataOutput { + return .init(track: track, videoMixer: videoMixer) + } + + @available(tvOS 17.0, *) + func suspend() { + guard !isSuspended else { + return + } + for capture in devices.values { + session.detachCapture(capture) + } + isSuspended = true + } + + @available(tvOS 17.0, *) + func resume() { + guard isSuspended else { + return + } + for capture in devices.values { + session.attachCapture(capture) + } + isSuspended = false + } + + func finish() { + _inputs.finish() + _output.finish() + } +} + +extension VideoCaptureUnit: VideoMixerDelegate { + // MARK: VideoMixerDelegate + func videoMixer(_ videoMixer: VideoMixer, track: UInt8, didInput sampleBuffer: CMSampleBuffer) { + _inputs.yield((track, sampleBuffer)) + } + + func videoMixer(_ videoMixer: VideoMixer, didOutput sampleBuffer: CMSampleBuffer) { + _output.yield(sampleBuffer) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/VideoDeviceUnit.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/VideoDeviceUnit.swift new file mode 100644 index 000000000..4288c3d41 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/VideoDeviceUnit.swift @@ -0,0 +1,209 @@ +import AVFoundation +import Foundation + +/// Configuration calback block for a VideoDeviceUnit. +@available(tvOS 17.0, *) +public typealias VideoDeviceConfigurationBlock = @Sendable (VideoDeviceUnit) throws -> Void + +/// An object that provides the interface to control the AVCaptureDevice's transport behavior. 
+@available(tvOS 17.0, *) +public final class VideoDeviceUnit: DeviceUnit { + /// The error domain codes. + public enum Error: Swift.Error { + /// The frameRate isn’t supported. + case unsupportedFrameRate + /// The dynamic range mode isn't supported. + case unsupportedDynamicRangeMode(_ mode: DynamicRangeMode) + } + + /// The output type that this capture video data output.. + public typealias Output = AVCaptureVideoDataOutput + + /// The default color format. + public static let colorFormat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange + + /// The device object. + public private(set) var device: AVCaptureDevice? + + /// The frame rate for capturing video frame. + public private(set) var frameRate = MediaMixer.defaultFrameRate + + /// Specifies the video capture color format. + public var colorFormat = VideoDeviceUnit.colorFormat + + /// The track number. + public let track: UInt8 + /// The input data to a cupture session. + public private(set) var input: AVCaptureInput? + /// The output data to a sample buffers. + public private(set) var output: Output? { + didSet { + oldValue?.setSampleBufferDelegate(nil, queue: nil) + guard let output else { + return + } + output.alwaysDiscardsLateVideoFrames = true + } + } + /// The connection from a capture input to a capture output. + public private(set) var connection: AVCaptureConnection? + + #if os(iOS) || os(macOS) + /// Specifies the videoOrientation indicates whether to rotate the video flowing through the connection to a given orientation. + public var videoOrientation: AVCaptureVideoOrientation = .portrait { + didSet { + output?.connections.filter { $0.isVideoOrientationSupported }.forEach { + $0.videoOrientation = videoOrientation + } + } + } + #endif + + #if os(iOS) || os(macOS) || os(tvOS) + /// Spcifies the video mirroed indicates whether the video flowing through the connection should be mirrored about its vertical axis. 
+ public var isVideoMirrored = false { + didSet { + output?.connections.filter { $0.isVideoMirroringSupported }.forEach { + $0.isVideoMirrored = isVideoMirrored + } + } + } + #endif + + #if os(iOS) + /// Specifies the preferredVideoStabilizationMode most appropriate for use with the connection. + public var preferredVideoStabilizationMode: AVCaptureVideoStabilizationMode = .off { + didSet { + output?.connections.filter { $0.isVideoStabilizationSupported }.forEach { + $0.preferredVideoStabilizationMode = preferredVideoStabilizationMode + } + } + } + #endif + + private var dynamicRangeMode: DynamicRangeMode = .sdr + private var dataOutput: VideoCaptureUnitDataOutput? + + init(_ track: UInt8, device: AVCaptureDevice) throws { + self.track = track + input = try AVCaptureDeviceInput(device: device) + self.output = AVCaptureVideoDataOutput() + self.device = device + #if os(iOS) + if let output, let port = input?.ports.first(where: { $0.mediaType == .video && $0.sourceDeviceType == device.deviceType && $0.sourceDevicePosition == device.position }) { + connection = AVCaptureConnection(inputPorts: [port], output: output) + } else { + connection = nil + } + #elseif os(tvOS) || os(macOS) + if let output, let port = input?.ports.first(where: { $0.mediaType == .video }) { + connection = AVCaptureConnection(inputPorts: [port], output: output) + } else { + connection = nil + } + #endif + } + + /// Sets the frame rate of a device capture. 
+ public func setFrameRate(_ frameRate: Float64) throws { + guard let device else { + return + } + try device.lockForConfiguration() + defer { + device.unlockForConfiguration() + } + if device.activeFormat.isFrameRateSupported(frameRate) { + device.activeVideoMinFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * frameRate)) + device.activeVideoMaxFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * frameRate)) + } else { + if let format = device.videoFormat( + width: device.activeFormat.formatDescription.dimensions.width, + height: device.activeFormat.formatDescription.dimensions.height, + frameRate: frameRate, + isMultiCamSupported: device.activeFormat.isMultiCamSupported + ) { + device.activeFormat = format + device.activeVideoMinFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * frameRate)) + device.activeVideoMaxFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * frameRate)) + } else { + throw Error.unsupportedFrameRate + } + } + self.frameRate = frameRate + } + + func setDynamicRangeMode(_ dynamicRangeMode: DynamicRangeMode) throws { + guard let device, self.dynamicRangeMode != dynamicRangeMode else { + return + } + try device.lockForConfiguration() + defer { + device.unlockForConfiguration() + } + let activeFormat = device.activeFormat + if let format = device.formats.filter({ $0.formatDescription.dimensions.size == activeFormat.formatDescription.dimensions.size }).first(where: { $0.formatDescription.mediaSubType.rawValue == dynamicRangeMode.videoFormat }) { + device.activeFormat = format + self.dynamicRangeMode = dynamicRangeMode + } else { + throw Error.unsupportedDynamicRangeMode(dynamicRangeMode) + } + } + + #if os(iOS) || os(tvOS) || os(macOS) + func setTorchMode(_ torchMode: AVCaptureDevice.TorchMode) { + guard let device, device.isTorchModeSupported(torchMode) else { + return + } + do { + try device.lockForConfiguration() + defer { + device.unlockForConfiguration() + } + device.torchMode = torchMode + } 
catch { + logger.error("while setting torch:", error) + } + } + #endif + + func setSampleBufferDelegate(_ videoUnit: VideoCaptureUnit?) { + dataOutput = videoUnit?.makeDataOutput(track) + output?.setSampleBufferDelegate(dataOutput, queue: videoUnit?.lockQueue) + } + + func apply() { + #if os(iOS) || os(tvOS) || os(macOS) + output?.connections.forEach { + if $0.isVideoMirroringSupported { + $0.isVideoMirrored = isVideoMirrored + } + #if os(iOS) || os(macOS) + if $0.isVideoOrientationSupported { + $0.videoOrientation = videoOrientation + } + #endif + #if os(iOS) + if $0.isVideoStabilizationSupported { + $0.preferredVideoStabilizationMode = preferredVideoStabilizationMode + } + #endif + } + #endif + } +} + +@available(tvOS 17.0, *) +final class VideoCaptureUnitDataOutput: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate { + private let track: UInt8 + private let videoMixer: VideoMixer + + init(track: UInt8, videoMixer: VideoMixer) { + self.track = track + self.videoMixer = videoMixer + } + + func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { + videoMixer.append(track, sampleBuffer: sampleBuffer) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/VideoMixer.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/VideoMixer.swift new file mode 100644 index 000000000..3def985dc --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/VideoMixer.swift @@ -0,0 +1,53 @@ +import CoreImage +import CoreMedia +import Foundation + +protocol VideoMixerDelegate: AnyObject { + func videoMixer(_ videoMixer: VideoMixer, track: UInt8, didInput sampleBuffer: CMSampleBuffer) + func videoMixer(_ videoMixer: VideoMixer, didOutput sampleBuffer: CMSampleBuffer) +} + +private let kVideoMixer_lockFlags = CVPixelBufferLockFlags(rawValue: .zero) + +final class VideoMixer { + weak var delegate: T? 
+ var settings: VideoMixerSettings = .default + private(set) var inputFormats: [UInt8: CMFormatDescription] = [:] + private var currentPixelBuffer: CVPixelBuffer? + + func append(_ track: UInt8, sampleBuffer: CMSampleBuffer) { + inputFormats[track] = sampleBuffer.formatDescription + delegate?.videoMixer(self, track: track, didInput: sampleBuffer) + switch settings.mode { + case .offscreen: + break + case .passthrough: + if settings.mainTrack == track { + outputSampleBuffer(sampleBuffer) + } + } + } + + func reset(_ track: UInt8) { + inputFormats[track] = nil + } + + @inline(__always) + private func outputSampleBuffer(_ sampleBuffer: CMSampleBuffer) { + defer { + currentPixelBuffer = sampleBuffer.imageBuffer + } + guard settings.isMuted else { + delegate?.videoMixer(self, didOutput: sampleBuffer) + return + } + do { + try sampleBuffer.imageBuffer?.mutate(kVideoMixer_lockFlags) { imageBuffer in + try imageBuffer.copy(currentPixelBuffer) + } + delegate?.videoMixer(self, didOutput: sampleBuffer) + } catch { + logger.warn(error) + } + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/VideoMixerSettings.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/VideoMixerSettings.swift new file mode 100644 index 000000000..cdc08c98a --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Mixer/VideoMixerSettings.swift @@ -0,0 +1,33 @@ +import Accelerate +import CoreMedia +import Foundation + +/// Constraints on the video mixier settings. +public struct VideoMixerSettings: Codable, Sendable { + /// The default setting for the mixer. + public static let `default`: VideoMixerSettings = .init() + + /// The type of image rendering mode. + public enum Mode: String, Codable, Sendable { + /// The input buffer will be used as it is. No effects will be applied. + case passthrough + /// Off-screen rendering will be performed to allow for more flexible drawing. + case offscreen + } + + /// Specifies the image rendering mode. 
+ public var mode: Mode + + /// Specifies the muted indicies whether freeze video signal or not. + public var isMuted: Bool + + /// Specifies the main track number. + public var mainTrack: UInt8 + + /// Create a new instance. + public init(mode: Mode = .passthrough, isMuted: Bool = false, mainTrack: UInt8 = 0) { + self.mode = mode + self.isMuted = isMuted + self.mainTrack = mainTrack + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Network/NetworkConnection.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Network/NetworkConnection.swift new file mode 100644 index 000000000..e1330c1a1 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Network/NetworkConnection.swift @@ -0,0 +1,10 @@ +import Foundation + +/// The interface is the foundation of the RTMPConnection. +public protocol NetworkConnection: Actor { + /// The instance connected to server(true) or not(false). + var connected: Bool { get async } + + /// Closes the connection from the server. + func close() async throws +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Network/NetworkMonitor.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Network/NetworkMonitor.swift new file mode 100644 index 000000000..caa1ee0d9 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Network/NetworkMonitor.swift @@ -0,0 +1,109 @@ +import Foundation + +/// An objec thatt provides the RTMPConnection, SRTConnection's monitoring events. +package final actor NetworkMonitor { + /// The error domain codes. + public enum Error: Swift.Error { + /// An invalid internal stare. + case invalidState + } + + /// An asynchronous sequence for network monitoring event. + public var event: AsyncStream { + AsyncStream { continuation in + self.continuation = continuation + } + } + + public private(set) var isRunning = false + private var timer: Task? 
{ + didSet { + oldValue?.cancel() + } + } + private var measureInterval = 3 + private var currentBytesInPerSecond = 0 + private var currentBytesOutPerSecond = 0 + private var previousTotalBytesIn = 0 + private var previousTotalBytesOut = 0 + private var previousQueueBytesOut: [Int] = [] + private var continuation: AsyncStream.Continuation? { + didSet { + oldValue?.finish() + } + } + private weak var reporter: (any NetworkTransportReporter)? + + /// Creates a new instance. + package init(_ reporter: some NetworkTransportReporter) { + self.reporter = reporter + } + + private func collect() async throws -> NetworkMonitorEvent { + guard let report = await reporter?.makeNetworkTransportReport() else { + throw Error.invalidState + } + let totalBytesIn = report.totalBytesIn + let totalBytesOut = report.totalBytesOut + let queueBytesOut = report.queueBytesOut + currentBytesInPerSecond = totalBytesIn - previousTotalBytesIn + currentBytesOutPerSecond = totalBytesOut - previousTotalBytesOut + previousTotalBytesIn = totalBytesIn + previousTotalBytesOut = totalBytesOut + previousQueueBytesOut.append(queueBytesOut) + let eventReport = NetworkMonitorReport( + totalBytesIn: totalBytesIn, + totalBytesOut: totalBytesOut, + currentQueueBytesOut: queueBytesOut, + currentBytesInPerSecond: currentBytesInPerSecond, + currentBytesOutPerSecond: currentBytesOutPerSecond + ) + if measureInterval <= previousQueueBytesOut.count { + defer { + previousQueueBytesOut.removeFirst() + } + var total = 0 + for i in 0.. NetworkMonitor + /// Makes a network transport report. 
+ func makeNetworkTransportReport() async -> NetworkTransportReport +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/AssetScreenObject.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/AssetScreenObject.swift new file mode 100644 index 000000000..0436cc3f8 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/AssetScreenObject.swift @@ -0,0 +1,118 @@ +import AVFoundation +import CoreImage + +#if !os(visionOS) +/// An object that manages offscreen rendering an asset resource. +public final class AssetScreenObject: ScreenObject, ChromaKeyProcessable { + public var chromaKeyColor: CGColor? + + /// The reading incidies whether assets reading or not. + public var isReading: Bool { + return reader?.status == .reading + } + + /// The video is displayed within a player layer’s bounds. + public var videoGravity: AVLayerVideoGravity = .resizeAspect { + didSet { + guard videoGravity != oldValue else { + return + } + invalidateLayout() + } + } + + private var reader: AVAssetReader? { + didSet { + if let oldValue, oldValue.status == .reading { + oldValue.cancelReading() + } + } + } + + private var sampleBuffer: CMSampleBuffer? { + didSet { + guard sampleBuffer != oldValue else { + return + } + if sampleBuffer == nil { + cancelReading() + return + } + invalidateLayout() + } + } + + private var startedAt: CMTime = .zero + private var videoTrackOutput: AVAssetReaderTrackOutput? + private var outputSettings = [ + kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_32BGRA + ] as [String: Any] + + /// Prepares the asset reader to start reading. 
+ public func startReading(_ asset: AVAsset) throws { + reader = try AVAssetReader(asset: asset) + guard let reader else { + return + } + let videoTrack = asset.tracks(withMediaType: .video).first + if let videoTrack { + let videoTrackOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: outputSettings) + videoTrackOutput.alwaysCopiesSampleData = false + reader.add(videoTrackOutput) + self.videoTrackOutput = videoTrackOutput + } + startedAt = CMClock.hostTimeClock.time + reader.startReading() + sampleBuffer = videoTrackOutput?.copyNextSampleBuffer() + } + + /// Cancels and stops the reader's output. + public func cancelReading() { + reader = nil + sampleBuffer = nil + videoTrackOutput = nil + } + + override public func makeBounds(_ size: CGSize) -> CGRect { + guard parent != nil, let image = sampleBuffer?.formatDescription?.dimensions.size else { + return super.makeBounds(size) + } + let bounds = super.makeBounds(size) + switch videoGravity { + case .resizeAspect: + let scale = min(bounds.size.width / image.width, bounds.size.height / image.height) + let scaleSize = CGSize(width: image.width * scale, height: image.height * scale) + return super.makeBounds(scaleSize) + case .resizeAspectFill: + return bounds + default: + return bounds + } + } + + override public func makeImage(_ renderer: some ScreenRenderer) -> CGImage? { + guard let image: CIImage = makeImage(renderer) else { + return nil + } + return renderer.context.createCGImage(image, from: videoGravity.region(bounds, image: image.extent)) + } + + override public func makeImage(_ renderer: some ScreenRenderer) -> CIImage? 
{ + guard let sampleBuffer, let pixelBuffer = sampleBuffer.imageBuffer else { + return nil + } + return CIImage(cvPixelBuffer: pixelBuffer).transformed(by: videoGravity.scale( + bounds.size, + image: pixelBuffer.size + )) + } + + override func draw(_ renderer: some ScreenRenderer) { + super.draw(renderer) + let duration = CMClock.hostTimeClock.time - startedAt + if let sampleBuffer, sampleBuffer.presentationTimeStamp <= duration { + self.sampleBuffer = videoTrackOutput?.copyNextSampleBuffer() + } + } +} +#endif diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ChromaKeyProcessor.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ChromaKeyProcessor.swift new file mode 100644 index 000000000..1838f4d60 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ChromaKeyProcessor.swift @@ -0,0 +1,178 @@ +import Accelerate +import Foundation +import simd + +/// A marker type with a chroma key processable screen object. +@ScreenActor +public protocol ChromaKeyProcessable { + /// Specifies the chroma key color. + var chromaKeyColor: CGColor? { get set } +} + +final class ChromaKeyProcessor { + static let noFlags = vImage_Flags(kvImageNoFlags) + static let labColorSpace = CGColorSpace(name: CGColorSpace.genericLab)! 
+ + enum Error: Swift.Error { + case invalidState + } + + private let entriesPerChannel = 32 + private let sourceChannelCount = 3 + private let destinationChannelCount = 1 + + private let srcFormat = vImage_CGImageFormat( + bitsPerComponent: 32, + bitsPerPixel: 32 * 3, + colorSpace: CGColorSpaceCreateDeviceRGB(), + bitmapInfo: CGBitmapInfo(rawValue: kCGBitmapByteOrder32Host.rawValue | CGBitmapInfo.floatComponents.rawValue | CGImageAlphaInfo.none.rawValue)) + + private let destFormat = vImage_CGImageFormat( + bitsPerComponent: 32, + bitsPerPixel: 32 * 3, + colorSpace: labColorSpace, + bitmapInfo: CGBitmapInfo(rawValue: kCGBitmapByteOrder32Host.rawValue | CGBitmapInfo.floatComponents.rawValue | CGImageAlphaInfo.none.rawValue)) + + private var tables: [CGColor: vImage_MultidimensionalTable] = [:] + private var outputF: [String: vImage_Buffer] = [:] + private var output8: [String: vImage_Buffer] = [:] + private var buffers: [String: [vImage_Buffer]] = [:] + private let converter: vImageConverter + private var maxFloats: [Float] = [1.0, 1.0, 1.0, 1.0] + private var minFloats: [Float] = [0.0, 0.0, 0.0, 0.0] + + init() throws { + guard let srcFormat, let destFormat else { + throw Error.invalidState + } + converter = try vImageConverter.make(sourceFormat: srcFormat, destinationFormat: destFormat) + } + + deinit { + tables.forEach { vImageMultidimensionalTable_Release($0.value) } + output8.forEach { $0.value.free() } + outputF.forEach { $0.value.free() } + buffers.forEach { $0.value.forEach { $0.free() } } + } + + func makeMask(_ source: inout vImage_Buffer, chromeKeyColor: CGColor) throws -> vImage_Buffer { + let key = "\(source.width):\(source.height)" + if tables[chromeKeyColor] == nil { + tables[chromeKeyColor] = try makeLookUpTable(chromeKeyColor, tolerance: 60) + } + if outputF[key] == nil { + outputF[key] = try vImage_Buffer(width: Int(source.width), height: Int(source.height), bitsPerPixel: 32) + } + if output8[key] == nil { + output8[key] = try vImage_Buffer(width: 
Int(source.width), height: Int(source.height), bitsPerPixel: 8) + } + guard + let table = tables[chromeKeyColor], + let dest = outputF[key] else { + throw Error.invalidState + } + var dests: [vImage_Buffer] = [dest] + let srcs = try makePlanarFBuffers(&source) + vImageMultiDimensionalInterpolatedLookupTable_PlanarF( + srcs, + &dests, + nil, + table, + kvImageFullInterpolation, + vImage_Flags(kvImageNoFlags) + ) + guard var result = output8[key] else { + throw Error.invalidState + } + vImageConvert_PlanarFtoPlanar8(&dests[0], &result, 1.0, 0.0, Self.noFlags) + return result + } + + private func makePlanarFBuffers(_ source: inout vImage_Buffer) throws -> [vImage_Buffer] { + let key = "\(source.width):\(source.height)" + if buffers[key] == nil { + buffers[key] = [ + try vImage_Buffer(width: Int(source.width), height: Int(source.height), bitsPerPixel: 32), + try vImage_Buffer(width: Int(source.width), height: Int(source.height), bitsPerPixel: 32), + try vImage_Buffer(width: Int(source.width), height: Int(source.height), bitsPerPixel: 32), + try vImage_Buffer(width: Int(source.width), height: Int(source.height), bitsPerPixel: 32) + ] + } + guard var buffers = buffers[key] else { + throw Error.invalidState + } + vImageConvert_ARGB8888toPlanarF( + &source, + &buffers[0], + &buffers[1], + &buffers[2], + &buffers[3], + &maxFloats, + &minFloats, + Self.noFlags) + return [ + buffers[1], + buffers[2], + buffers[3] + ] + } + + private func makeLookUpTable(_ chromaKeyColor: CGColor, tolerance: Float) throws -> vImage_MultidimensionalTable? { + let ramp = vDSP.ramp(in: 0 ... 1.0, count: Int(entriesPerChannel)) + let lookupTableElementCount = Int(pow(Float(entriesPerChannel), Float(sourceChannelCount))) * Int(destinationChannelCount) + var lookupTableData = [UInt16].init(repeating: 0, count: lookupTableElementCount) + let chromaKeyRGB = chromaKeyColor.components ?? [0, 0, 0] + let chromaKeyLab = try rgbToLab( + r: chromaKeyRGB[0], + g: chromaKeyRGB.count > 1 ? 
chromaKeyRGB[1] : chromaKeyRGB[0], + b: chromaKeyRGB.count > 2 ? chromaKeyRGB[2] : chromaKeyRGB[0] + ) + var bufferIndex = 0 + for red in ramp { + for green in ramp { + for blue in ramp { + let lab = try rgbToLab(r: red, g: green, b: blue) + let distance = simd_distance(chromaKeyLab, lab) + let contrast = Float(20) + let offset = Float(0.25) + let alpha = saturate(tanh(((distance / tolerance ) - 0.5 - offset) * contrast)) + lookupTableData[bufferIndex] = UInt16(alpha * Float(UInt16.max)) + bufferIndex += 1 + } + } + } + var entryCountPerSourceChannel = [UInt8](repeating: UInt8(entriesPerChannel), count: sourceChannelCount) + let result = vImageMultidimensionalTable_Create( + &lookupTableData, + 3, + 1, + &entryCountPerSourceChannel, + kvImageMDTableHint_Float, + vImage_Flags(kvImageNoFlags), + nil) + vImageMultidimensionalTable_Retain(result) + return result + } + + private func rgbToLab(r: CGFloat, g: CGFloat, b: CGFloat) throws -> SIMD3 { + var data: [Float] = [Float(r), Float(g), Float(b)] + var srcPixelBuffer = data.withUnsafeMutableBufferPointer { pointer in + vImage_Buffer(data: pointer.baseAddress, height: 1, width: 1, rowBytes: 4 * 3) + } + var destPixelBuffer = try vImage_Buffer(width: 1, height: 1, bitsPerPixel: 32 * 3) + vImageConvert_AnyToAny(converter, &srcPixelBuffer, &destPixelBuffer, nil, vImage_Flags(kvImageNoFlags)) + defer { + destPixelBuffer.free() + } + let result = destPixelBuffer.data.assumingMemoryBound(to: Float.self) + return .init( + result[0], + result[1], + result[2] + ) + } + + private func saturate(_ x: T) -> T { + return min(max(0, x), 1) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/DisplayLinkChoreographer.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/DisplayLinkChoreographer.swift new file mode 100644 index 000000000..d3fe32ba4 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/DisplayLinkChoreographer.swift @@ -0,0 +1,148 @@ +import Foundation + +#if os(macOS) +import 
CoreVideo + +// swiftlint:disable attributes +// CADisplayLink is deprecated, I've given up on making it conform to Sendable. +final class DisplayLink: NSObject, @unchecked Sendable { + private static let preferredFramesPerSecond = 0 + + var isPaused = false { + didSet { + guard let displayLink, oldValue != isPaused else { + return + } + if isPaused { + CVDisplayLinkStop(displayLink) + } else { + CVDisplayLinkStart(displayLink) + } + } + } + var preferredFramesPerSecond = DisplayLink.preferredFramesPerSecond { + didSet { + guard preferredFramesPerSecond != oldValue else { + return + } + frameInterval = 1.0 / Double(preferredFramesPerSecond) + } + } + private(set) var duration = 0.0 + private(set) var timestamp: CFTimeInterval = 0 + private(set) var targetTimestamp: CFTimeInterval = 0 + private var selector: Selector? + private var displayLink: CVDisplayLink? + private var frameInterval = 0.0 + private weak var delegate: NSObject? + + deinit { + selector = nil + } + + init(target: NSObject, selector sel: Selector) { + super.init() + CVDisplayLinkCreateWithActiveCGDisplays(&displayLink) + guard let displayLink = displayLink else { + return + } + self.delegate = target + self.selector = sel + CVDisplayLinkSetOutputHandler(displayLink) { [weak self] _, inNow, _, _, _ -> CVReturn in + guard let self else { + return kCVReturnSuccess + } + if frameInterval == 0 || frameInterval <= inNow.pointee.timestamp - self.timestamp { + self.timestamp = Double(inNow.pointee.timestamp) + self.targetTimestamp = self.timestamp + frameInterval + _ = self.delegate?.perform(self.selector, with: self) + } + return kCVReturnSuccess + } + } + + func add(to runloop: RunLoop, forMode mode: RunLoop.Mode) { + guard let displayLink, !isPaused else { + return + } + CVDisplayLinkStart(displayLink) + } + + func invalidate() { + guard let displayLink, isPaused else { + return + } + CVDisplayLinkStop(displayLink) + } +} + +extension CVTimeStamp { + @inlinable @inline(__always) + var timestamp: Double { 
+ Double(self.hostTime) / Double(self.videoTimeScale) + } +} + +// swiftlint:enable attributes + +#else +import QuartzCore +typealias DisplayLink = CADisplayLink +#endif + +struct DisplayLinkTime { + let timestamp: TimeInterval + let targetTimestamp: TimeInterval +} + +final class DisplayLinkChoreographer: NSObject { + private static let preferredFramesPerSecond = 0 + + var updateFrames: AsyncStream { + AsyncStream { continuation in + self.continutation = continuation + } + } + var preferredFramesPerSecond = DisplayLinkChoreographer.preferredFramesPerSecond { + didSet { + guard preferredFramesPerSecond != oldValue else { + return + } + displayLink?.preferredFramesPerSecond = preferredFramesPerSecond + } + } + private(set) var isRunning = false + private var displayLink: DisplayLink? { + didSet { + oldValue?.invalidate() + displayLink?.preferredFramesPerSecond = preferredFramesPerSecond + displayLink?.isPaused = false + displayLink?.add(to: .main, forMode: .common) + } + } + private var continutation: AsyncStream.Continuation? + + @objc + private func update(displayLink: DisplayLink) { + continutation?.yield(.init(timestamp: displayLink.timestamp, targetTimestamp: displayLink.targetTimestamp)) + } +} + +extension DisplayLinkChoreographer: Runner { + func startRunning() { + guard !isRunning else { + return + } + displayLink = DisplayLink(target: self, selector: #selector(self.update(displayLink:))) + isRunning = true + } + + func stopRunning() { + guard isRunning else { + return + } + isRunning = false + displayLink = nil + continutation?.finish() + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ImageScreenObject.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ImageScreenObject.swift new file mode 100644 index 000000000..445a5008d --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ImageScreenObject.swift @@ -0,0 +1,59 @@ +import CoreImage + +/// An object that manages offscreen rendering a cgImage source. 
+public final class ImageScreenObject: ScreenObject { + /// Specifies the image. + public var cgImage: CGImage? { + didSet { + guard cgImage != oldValue else { + return + } + invalidateLayout() + } + } + + override public func makeImage(_ renderer: some ScreenRenderer) -> CGImage? { + let intersection = bounds.intersection(renderer.bounds) + + guard bounds != intersection else { + return cgImage + } + + // Handling when the drawing area is exceeded. + let x: CGFloat + switch horizontalAlignment { + case .left: + x = bounds.origin.x + case .center: + x = bounds.origin.x / 2 + case .right: + x = 0.0 + } + + let y: CGFloat + switch verticalAlignment { + case .top: + y = 0.0 + case .middle: + y = abs(bounds.origin.y) / 2 + case .bottom: + y = abs(bounds.origin.y) + } + + return cgImage?.cropping(to: .init(origin: .init(x: x, y: y), size: intersection.size)) + } + + override public func makeImage(_ renderer: some ScreenRenderer) -> CIImage? { + guard let image: CGImage = makeImage(renderer) else { + return nil + } + return CIImage(cgImage: image) + } + + override public func makeBounds(_ size: CGSize) -> CGRect { + guard let cgImage else { + return super.makeBounds(size) + } + return super.makeBounds(size == .zero ? cgImage.size : size) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/RoundedRectangleFactory.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/RoundedRectangleFactory.swift new file mode 100644 index 000000000..303cbeced --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/RoundedRectangleFactory.swift @@ -0,0 +1,25 @@ +import CoreImage + +final class RoundedRectangleFactory { + private var imageBuffers: [String: CIImage] = [:] + + func cornerRadius(_ size: CGSize, cornerRadius: CGFloat) -> CIImage? 
{ + let key = "\(size.width):\(size.height):\(cornerRadius)" + if let buffer = imageBuffers[key] { + return buffer + } + let roundedRect = CIFilter.roundedRectangleGenerator() + roundedRect.extent = .init(origin: .zero, size: size) + roundedRect.radius = Float(cornerRadius) + guard + let image = roundedRect.outputImage else { + return nil + } + imageBuffers[key] = image + return imageBuffers[key] + } + + func removeAll() { + imageBuffers.removeAll() + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/Screen.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/Screen.swift new file mode 100644 index 000000000..90a7d04b7 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/Screen.swift @@ -0,0 +1,236 @@ +import AVFoundation +import Foundation + +#if canImport(AppKit) +import AppKit +#endif + +#if canImport(UIKit) +import UIKit +#endif + +/// An interface a screen uses to inform its delegate. +public protocol ScreenDelegate: AnyObject { + /// Tells the receiver to screen object layout phase. + func screen(_ screen: Screen, willLayout time: CMTime) +} + +/// An object that manages offscreen rendering a foundation. +public final class Screen: ScreenObjectContainerConvertible { + /// The default screen size. + public static let size = CGSize(width: 1280, height: 720) + + private static let lockFlags = CVPixelBufferLockFlags(rawValue: 0) + private static let preferredTimescale: CMTimeScale = 1000000000 + + /// The total of child counts. + public var childCounts: Int { + return root.childCounts + } + + /// Specifies the delegate object. + public weak var delegate: (any ScreenDelegate)? + + /// Specifies the video size to use when output a video. 
+ public var size: CGSize = Screen.size { + didSet { + guard size != oldValue else { + return + } + renderer.bounds = .init(origin: .zero, size: size) + CVPixelBufferPoolCreate(nil, nil, dynamicRangeMode.makePixelBufferAttributes(size), &pixelBufferPool) + } + } + + /// Specifies the gpu rendering enabled. + @available(*, deprecated) + public var isGPURendererEnabled = false { + didSet { + guard isGPURendererEnabled != oldValue else { + return + } + if isGPURendererEnabled { + renderer = ScreenRendererByGPU(dynamicRangeMode: dynamicRangeMode) + } else { + renderer = ScreenRendererByCPU(dynamicRangeMode: dynamicRangeMode) + } + } + } + + #if os(macOS) + /// Specifies the background color. + public var backgroundColor: CGColor = NSColor.black.cgColor { + didSet { + guard backgroundColor != oldValue else { + return + } + renderer.backgroundColor = backgroundColor + } + } + #else + /// Specifies the background color. + public var backgroundColor: CGColor = UIColor.black.cgColor { + didSet { + guard backgroundColor != oldValue else { + return + } + renderer.backgroundColor = backgroundColor + } + } + #endif + + var synchronizationClock: CMClock? 
{ + get { + return renderer.synchronizationClock + } + set { + renderer.synchronizationClock = newValue + } + } + var dynamicRangeMode: DynamicRangeMode = .sdr { + didSet { + guard dynamicRangeMode != oldValue else { + return + } + if isGPURendererEnabled { + renderer = ScreenRendererByGPU(dynamicRangeMode: dynamicRangeMode) + } else { + renderer = ScreenRendererByCPU(dynamicRangeMode: dynamicRangeMode) + } + CVPixelBufferPoolCreate(nil, nil, dynamicRangeMode.makePixelBufferAttributes(size), &pixelBufferPool) + } + } + private(set) var renderer: (any ScreenRenderer) = ScreenRendererByCPU(dynamicRangeMode: .sdr) { + didSet { + renderer.bounds = oldValue.bounds + renderer.backgroundColor = oldValue.backgroundColor + renderer.synchronizationClock = oldValue.synchronizationClock + } + } + private(set) var targetTimestamp: TimeInterval = 0.0 + private(set) var videoTrackScreenObject = VideoTrackScreenObject() + private var videoCaptureLatency: TimeInterval = 0.0 + private var root: ScreenObjectContainer = .init() + private var outputFormat: CMFormatDescription? + private var pixelBufferPool: CVPixelBufferPool? { + didSet { + outputFormat = nil + } + } + private var presentationTimeStamp: CMTime = .zero + + /// Creates a screen object. + public init() { + try? addChild(videoTrackScreenObject) + CVPixelBufferPoolCreate(nil, nil, dynamicRangeMode.makePixelBufferAttributes(size), &pixelBufferPool) + } + + /// Adds the specified screen object as a child of the current screen object container. + public func addChild(_ child: ScreenObject?) throws { + try root.addChild(child) + } + + /// Removes the specified screen object as a child of the current screen object container. + public func removeChild(_ child: ScreenObject?) { + root.removeChild(child) + } + + /// Registers a video effect. + public func registerVideoEffect(_ effect: some VideoEffect) -> Bool { + return videoTrackScreenObject.registerVideoEffect(effect) + } + + /// Unregisters a video effect. 
+ public func unregisterVideoEffect(_ effect: some VideoEffect) -> Bool { + return videoTrackScreenObject.unregisterVideoEffect(effect) + } + + func append(_ track: UInt8, buffer: CMSampleBuffer) { + let screens: [VideoTrackScreenObject] = root.getScreenObjects() + for screen in screens where screen.track == track { + screen.enqueue(buffer) + } + } + + func makeSampleBuffer(_ updateFrame: DisplayLinkTime) -> CMSampleBuffer? { + defer { + targetTimestamp = updateFrame.targetTimestamp + } + var pixelBuffer: CVPixelBuffer? + pixelBufferPool?.createPixelBuffer(&pixelBuffer) + guard let pixelBuffer else { + return nil + } + if outputFormat == nil { + CMVideoFormatDescriptionCreateForImageBuffer( + allocator: kCFAllocatorDefault, + imageBuffer: pixelBuffer, + formatDescriptionOut: &outputFormat + ) + } + guard let outputFormat else { + return nil + } + if let dictionary = CVBufferCopyAttachments(pixelBuffer, .shouldNotPropagate) { + CVBufferSetAttachments(pixelBuffer, dictionary, .shouldPropagate) + } + let presentationTimeStamp = CMTime(seconds: updateFrame.timestamp - videoCaptureLatency, preferredTimescale: Self.preferredTimescale) + guard self.presentationTimeStamp <= presentationTimeStamp else { + return nil + } + self.presentationTimeStamp = presentationTimeStamp + var timingInfo = CMSampleTimingInfo( + duration: CMTime(seconds: updateFrame.targetTimestamp - updateFrame.timestamp, preferredTimescale: Self.preferredTimescale), + presentationTimeStamp: presentationTimeStamp, + decodeTimeStamp: .invalid + ) + var sampleBuffer: CMSampleBuffer? + guard CMSampleBufferCreateReadyWithImageBuffer( + allocator: kCFAllocatorDefault, + imageBuffer: pixelBuffer, + formatDescription: outputFormat, + sampleTiming: &timingInfo, + sampleBufferOut: &sampleBuffer + ) == noErr else { + return nil + } + if let sampleBuffer { + return render(sampleBuffer) + } else { + return nil + } + } + + func render(_ sampleBuffer: CMSampleBuffer) -> CMSampleBuffer { + try? 
sampleBuffer.imageBuffer?.lockBaseAddress(Self.lockFlags) + defer { + try? sampleBuffer.imageBuffer?.unlockBaseAddress(Self.lockFlags) + } + renderer.presentationTimeStamp = sampleBuffer.presentationTimeStamp + renderer.setTarget(sampleBuffer.imageBuffer) + if let dimensions = sampleBuffer.formatDescription?.dimensions { + root.size = dimensions.size + } + delegate?.screen(self, willLayout: sampleBuffer.presentationTimeStamp) + root.layout(renderer) + root.draw(renderer) + renderer.render() + return sampleBuffer + } + + func setVideoCaptureLatency(_ presentationTimeStamp: CMTime) { + guard 0 < targetTimestamp else { + return + } + let hostPresentationTimeStamp = presentationTimeStamp.convertTime(from: synchronizationClock) + let diff = ceil((targetTimestamp - hostPresentationTimeStamp.seconds) * 10000) / 10000 + videoCaptureLatency = diff + } + + func reset() { + let screens: [VideoTrackScreenObject] = root.getScreenObjects() + for screen in screens { + screen.reset() + } + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenActor.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenActor.swift new file mode 100644 index 000000000..d35dc4754 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenActor.swift @@ -0,0 +1,11 @@ +import Foundation + +/// A singleton actor whose executor screen object rendering. +@globalActor +public actor ScreenActor { + /// The shared actor instance. 
+ public static let shared = ScreenActor() + + private init() { + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenObject.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenObject.swift new file mode 100644 index 000000000..ec8b8e97c --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenObject.swift @@ -0,0 +1,162 @@ +import Accelerate +import AVFoundation +import CoreImage +import CoreMedia +import Foundation +import VideoToolbox + +#if canImport(AppKit) +import AppKit +#endif + +#if canImport(UIKit) +import UIKit +#endif + +/// The ScreenObject class is the abstract class for all objects that are rendered on the screen. +@ScreenActor +open class ScreenObject { + /// The horizontal alignment for the screen object. + public enum HorizontalAlignment { + /// A guide that marks the left edge of the screen object. + case left + /// A guide that marks the borizontal center of the screen object. + case center + /// A guide that marks the right edge of the screen object. + case right + } + + /// The vertical alignment for the screen object. + public enum VerticalAlignment { + /// A guide that marks the top edge of the screen object. + case top + /// A guide that marks the vertical middle of the screen object. + case middle + /// A guide that marks the bottom edge of the screen object. + case bottom + } + + enum BlendMode { + case normal + case alpha + } + + /// The screen object container that contains this screen object + public internal(set) weak var parent: ScreenObjectContainer? + + /// Specifies the size rectangle. + public var size: CGSize = .zero { + didSet { + guard size != oldValue else { + return + } + shouldInvalidateLayout = true + } + } + + /// The bounds rectangle. + public internal(set) var bounds: CGRect = .zero + + /// Specifies the visibility of the object. + public var isVisible = true + + #if os(macOS) + /// Specifies the default spacing to laying out content in the screen object. 
+ public var layoutMargin: NSEdgeInsets = .init(top: 0, left: 0, bottom: 0, right: 0) + #else + /// Specifies the default spacing to laying out content in the screen object. + public var layoutMargin: UIEdgeInsets = .init(top: 0, left: 0, bottom: 0, right: 0) + #endif + + /// Specifies the radius to use when drawing rounded corners. + public var cornerRadius: CGFloat = 0.0 + + /// Specifies the alignment position along the vertical axis. + public var verticalAlignment: VerticalAlignment = .top + + /// Specifies the alignment position along the horizontal axis. + public var horizontalAlignment: HorizontalAlignment = .left + + var blendMode: BlendMode { + .alpha + } + + var shouldInvalidateLayout = true + + /// Creates a screen object. + public init() { + } + + /// Invalidates the current layout and triggers a layout update. + public func invalidateLayout() { + shouldInvalidateLayout = true + } + + /// Makes cgImage for offscreen image. + @available(*, deprecated, message: "It will be removed in the next major update. Please migrate to using CIImage instead.") + open func makeImage(_ renderer: some ScreenRenderer) -> CGImage? { + return nil + } + + /// Makes ciImage for offscreen image. + open func makeImage(_ renderer: some ScreenRenderer) -> CIImage? { + return nil + } + + /// Makes screen object bounds for offscreen image. + open func makeBounds(_ size: CGSize) -> CGRect { + guard let parent else { + return .init(origin: .zero, size: self.size) + } + + let width = size.width == 0 ? max(parent.bounds.width - layoutMargin.left - layoutMargin.right + size.width, 0) : size.width + let height = size.height == 0 ? 
max(parent.bounds.height - layoutMargin.top - layoutMargin.bottom + size.height, 0) : size.height + + let parentX = parent.bounds.origin.x + let parentWidth = parent.bounds.width + let x: CGFloat + switch horizontalAlignment { + case .center: + x = parentX + (parentWidth - width) / 2 + case .left: + x = parentX + layoutMargin.left + case .right: + x = parentX + (parentWidth - width) - layoutMargin.right + } + + let parentY = parent.bounds.origin.y + let parentHeight = parent.bounds.height + let y: CGFloat + switch verticalAlignment { + case .top: + y = parentY + layoutMargin.top + case .middle: + y = parentY + (parentHeight - height) / 2 + case .bottom: + y = parentY + (parentHeight - height) - layoutMargin.bottom + } + + return .init(x: x, y: y, width: width, height: height) + } + + func layout(_ renderer: some ScreenRenderer) { + bounds = makeBounds(size) + renderer.layout(self) + shouldInvalidateLayout = false + } + + func draw(_ renderer: some ScreenRenderer) { + renderer.draw(self) + } +} + +extension ScreenObject: Hashable { + // MARK: Hashable + nonisolated public static func == (lhs: ScreenObject, rhs: ScreenObject) -> Bool { + lhs === rhs + } + + nonisolated public func hash(into hasher: inout Hasher) { + hasher.combine(ObjectIdentifier(self)) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenObjectContainer.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenObjectContainer.swift new file mode 100644 index 000000000..343c47be2 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenObjectContainer.swift @@ -0,0 +1,80 @@ +import AVFoundation +import Foundation + +@ScreenActor +protocol ScreenObjectContainerConvertible: AnyObject { + func addChild(_ child: ScreenObject?) throws + func removeChild(_ child: ScreenObject?) +} + +/// An object represents a collection of screen objects. +public class ScreenObjectContainer: ScreenObject, ScreenObjectContainerConvertible { + /// The error domain codes. 
+ public enum Error: Swift.Error { + /// An error the screen object registry throws when the app registers a screen object twice by the same instance. + case alreadyExists + } + + /// The total of child counts. + public var childCounts: Int { + children.count + } + + private var children: [ScreenObject] = .init() + + /// Adds the specified screen object as a child of the current screen object container. + public func addChild(_ child: ScreenObject?) throws { + guard let child, child != self else { + return + } + if child.parent != nil { + throw Error.alreadyExists + } + child.parent = self + children.append(child) + invalidateLayout() + } + + /// Removes the specified screen object as a child of the current screen object container. + public func removeChild(_ child: ScreenObject?) { + guard let child, child.parent == self else { + return + } + guard let indexOf = children.firstIndex(where: { $0 == child }) else { + return + } + child.parent = nil + children.remove(at: indexOf) + invalidateLayout() + } + + override func layout(_ renderer: some ScreenRenderer) { + bounds = makeBounds(size) + children.forEach { child in + if child.shouldInvalidateLayout || shouldInvalidateLayout { + child.layout(renderer) + } + } + shouldInvalidateLayout = false + } + + override func draw(_ renderer: some ScreenRenderer) { + guard isVisible else { + return + } + children.forEach { child in + guard child.isVisible else { + return + } + child.draw(renderer) + } + } + + func getScreenObjects() -> [T] { + var objects = children.compactMap { $0 as? T } + children.compactMap { $0 as? 
ScreenObjectContainer }.forEach { + objects += $0.getScreenObjects() + } + return objects + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenRenderer.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenRenderer.swift new file mode 100644 index 000000000..973e3300c --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenRenderer.swift @@ -0,0 +1,28 @@ +import AVFoundation +import CoreImage +import Foundation + +/// A type that renders a screen object. +@ScreenActor +public protocol ScreenRenderer: AnyObject { + /// The CIContext instance. + var context: CIContext { get } + /// The CIImage options. + var imageOptions: [CIImageOption: Any]? { get } + /// Specifies the backgroundColor for output video. + var backgroundColor: CGColor { get set } + /// The current screen bounds. + var bounds: CGRect { get set } + /// The current presentationTimeStamp. + var presentationTimeStamp: CMTime { get set } + /// The current session synchronization clock. + var synchronizationClock: CMClock? { get set } + /// Layouts a screen object. + func layout(_ screenObject: ScreenObject) + /// Draws a sceen object. + func draw(_ screenObject: ScreenObject) + /// Sets up the render target. + func setTarget(_ pixelBuffer: CVPixelBuffer?) + /// Render a screen to buffer. + func render() +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenRendererByCPU.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenRendererByCPU.swift new file mode 100644 index 000000000..c52db0115 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenRendererByCPU.swift @@ -0,0 +1,184 @@ +import Accelerate +import AVFoundation +import CoreImage + +final class ScreenRendererByCPU: ScreenRenderer { + static let noFlags = vImage_Flags(kvImageNoFlags) + static let doNotTile = vImage_Flags(kvImageDoNotTile) + + var bounds: CGRect = .init(origin: .zero, size: Screen.size) + let imageOptions: [CIImageOption: Any]? 
+ var synchronizationClock: CMClock? + var presentationTimeStamp: CMTime = .zero + + let context: CIContext + + var backgroundColor = CGColor(red: 0x00, green: 0x00, blue: 0x00, alpha: 0x00) { + didSet { + guard backgroundColor != oldValue, let components = backgroundColor.components else { + return + } + switch components.count { + case 2: + backgroundColorUInt8Array = [ + UInt8(components[1] * 255), + UInt8(components[0] * 255), + UInt8(components[0] * 255), + UInt8(components[0] * 255) + ] + case 3: + backgroundColorUInt8Array = [ + UInt8(components[2] * 255), + UInt8(components[0] * 255), + UInt8(components[1] * 255), + UInt8(components[1] * 255) + ] + case 4: + backgroundColorUInt8Array = [ + UInt8(components[3] * 255), + UInt8(components[0] * 255), + UInt8(components[1] * 255), + UInt8(components[2] * 255) + ] + default: + break + } + } + } + + private var format = vImage_CGImageFormat( + bitsPerComponent: 8, + bitsPerPixel: 32, + colorSpace: nil, + bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.first.rawValue), + version: 0, + decode: nil, + renderingIntent: .defaultIntent) + + private var images: [ScreenObject: vImage_Buffer] = [:] + private var canvas: vImage_Buffer = .init() + private var converter: vImageConverter? + private var shapeFactory = ShapeFactory() + private var pixelFormatType: OSType? { + didSet { + guard pixelFormatType != oldValue else { + return + } + converter = nil + } + } + private var backgroundColorUInt8Array: [UInt8] = [0x00, 0x00, 0x00, 0x00] + private lazy var choromaKeyProcessor: ChromaKeyProcessor? = { + return try? ChromaKeyProcessor() + }() + + init(dynamicRangeMode: DynamicRangeMode) { + context = dynamicRangeMode.makeCIContext() + if let colorSpace = dynamicRangeMode.colorSpace { + imageOptions = [.colorSpace: colorSpace] + } else { + imageOptions = nil + } + } + + func setTarget(_ pixelBuffer: CVPixelBuffer?) 
{ + guard let pixelBuffer else { + return + } + pixelFormatType = pixelBuffer.pixelFormatType + if converter == nil { + let cvImageFormat = vImageCVImageFormat_CreateWithCVPixelBuffer(pixelBuffer).takeRetainedValue() + vImageCVImageFormat_SetColorSpace(cvImageFormat, CGColorSpaceCreateDeviceRGB()) + converter = try? vImageConverter.make( + sourceFormat: cvImageFormat, + destinationFormat: format + ) + } + guard let converter else { + return + } + vImageBuffer_InitForCopyFromCVPixelBuffer( + &canvas, + converter, + pixelBuffer, + vImage_Flags(kvImageNoAllocate) + ) + switch pixelFormatType { + case kCVPixelFormatType_32ARGB: + vImageBufferFill_ARGB8888( + &canvas, + &backgroundColorUInt8Array, + vImage_Flags(kvImageNoFlags) + ) + default: + break + } + } + + func layout(_ screenObject: ScreenObject) { + autoreleasepool { + guard let image: CGImage = screenObject.makeImage(self) else { + return + } + do { + images[screenObject]?.free() + var buffer = try vImage_Buffer(cgImage: image, format: format) + images[screenObject] = buffer + if 0 < screenObject.cornerRadius { + if var mask = shapeFactory.cornerRadius(image.size, cornerRadius: screenObject.cornerRadius) { + vImageOverwriteChannels_ARGB8888(&mask, &buffer, &buffer, 0x8, Self.noFlags) + } + } else { + if let screenObject = screenObject as? 
(any ChromaKeyProcessable), + let chromaKeyColor = screenObject.chromaKeyColor, + var mask = try choromaKeyProcessor?.makeMask(&buffer, chromeKeyColor: chromaKeyColor) { + vImageOverwriteChannels_ARGB8888(&mask, &buffer, &buffer, 0x8, Self.noFlags) + } + } + } catch { + logger.error(error) + } + } + } + + func draw(_ screenObject: ScreenObject) { + guard var image = images[screenObject] else { + return + } + + let origin = screenObject.bounds.origin + let start = Int(max(0, origin.y)) * canvas.rowBytes + Int(max(0, origin.x)) * 4 + + var destination = vImage_Buffer( + data: canvas.data.advanced(by: start), + height: image.height, + width: image.width, + rowBytes: canvas.rowBytes + ) + + switch pixelFormatType { + case kCVPixelFormatType_32ARGB: + switch screenObject.blendMode { + case .normal: + vImageCopyBuffer( + &image, + &destination, + 4, + Self.doNotTile + ) + case .alpha: + vImageAlphaBlend_ARGB8888( + &image, + &destination, + &destination, + Self.doNotTile + ) + } + default: + break + } + } + + func render() { + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenRendererByGPU.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenRendererByGPU.swift new file mode 100644 index 000000000..a214ddcef --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ScreenRendererByGPU.swift @@ -0,0 +1,87 @@ +import Accelerate +import AVFoundation +import CoreImage +import CoreImage.CIFilterBuiltins + +final class ScreenRendererByGPU: ScreenRenderer { + var bounds: CGRect = .init(origin: .zero, size: Screen.size) + let imageOptions: [CIImageOption: Any]? + var synchronizationClock: CMClock? 
+ var presentationTimeStamp: CMTime = .zero + + let context: CIContext + + var backgroundColor = CGColor(red: 0x00, green: 0x00, blue: 0x00, alpha: 0x00) { + didSet { + guard backgroundColor != oldValue else { + return + } + backgroundCIColor = CIColor(cgColor: backgroundColor) + } + } + + private var canvas: CIImage = .init() + private var images: [ScreenObject: CIImage] = [:] + private var pixelBuffer: CVPixelBuffer? + private let dynamicRangeMode: DynamicRangeMode + private var backgroundCIColor = CIColor() + private var roundedRectangleFactory = RoundedRectangleFactory() + + init(dynamicRangeMode: DynamicRangeMode) { + self.dynamicRangeMode = dynamicRangeMode + context = dynamicRangeMode.makeCIContext() + if let colorSpace = dynamicRangeMode.colorSpace { + imageOptions = [.colorSpace: colorSpace] + } else { + imageOptions = nil + } + } + + func setTarget(_ pixelBuffer: CVPixelBuffer?) { + guard let pixelBuffer else { + return + } + self.pixelBuffer = pixelBuffer + canvas = CIImage(color: backgroundCIColor).cropped(to: bounds) + } + + func layout(_ screenObject: ScreenObject) { + guard let image: CIImage = screenObject.makeImage(self) else { + return + } + if 0 < screenObject.cornerRadius { + if let mask = roundedRectangleFactory.cornerRadius(screenObject.bounds.size, cornerRadius: screenObject.cornerRadius) { + images[screenObject] = image.applyingFilter("CIBlendWithAlphaMask", parameters: [ + "inputMaskImage": mask + ]) + } else { + images[screenObject] = image + } + } else { + images[screenObject] = image + } + } + + func draw(_ screenObject: ScreenObject) { + guard let image = images[screenObject] else { + return + } + let origin = screenObject.bounds.origin + if origin.x == 0 && origin.y == 0 { + canvas = image + .composited(over: canvas) + } else { + canvas = image + .transformed(by: .init(translationX: origin.x, y: bounds.height - origin.y - screenObject.bounds.height)) + .composited(over: canvas) + } + } + + func render() { + guard let pixelBuffer else { 
+ return + } + context.render(canvas, to: pixelBuffer, bounds: canvas.extent, colorSpace: dynamicRangeMode.colorSpace) + dynamicRangeMode.attach(pixelBuffer) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/Shape.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/Shape.swift new file mode 100644 index 000000000..b255f022a --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/Shape.swift @@ -0,0 +1,40 @@ +import Accelerate +import Foundation + +#if canImport(AppKit) +import AppKit +#endif + +#if canImport(UIKit) +import UIKit +#endif + +final class RoundedSquareShape: Shape { + var rect: CGRect = .zero + var cornerRadius: CGFloat = .zero + + func makeCGImage() -> CGImage? { + guard let context = CGContext( + data: nil, + width: Int(rect.width), + height: Int(rect.height), + bitsPerComponent: 8, + bytesPerRow: Int(rect.width), + space: CGColorSpaceCreateDeviceGray(), + bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.none.rawValue).rawValue + ) else { + return nil + } + let path = CGPath(roundedRect: rect, cornerWidth: cornerRadius, cornerHeight: cornerRadius, transform: nil) + #if canImport(AppKit) && !targetEnvironment(macCatalyst) + context.setFillColor(NSColor.white.cgColor) + #endif + #if canImport(UIKit) + context.setFillColor(UIColor.white.cgColor) + #endif + context.addPath(path) + context.closePath() + context.fillPath() + return context.makeImage() + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ShapeFactory.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ShapeFactory.swift new file mode 100644 index 000000000..5b5b70aaa --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/ShapeFactory.swift @@ -0,0 +1,34 @@ +import Accelerate +import CoreMedia +import Foundation + +protocol Shape { + func makeCGImage() -> CGImage? 
+} + +final class ShapeFactory { + private var imageBuffers: [String: vImage_Buffer] = [:] + private var roundedSquareShape = RoundedSquareShape() + + func cornerRadius(_ size: CGSize, cornerRadius: CGFloat) -> vImage_Buffer? { + let key = "\(size.width):\(size.height):\(cornerRadius)" + if let buffer = imageBuffers[key] { + return buffer + } + roundedSquareShape.rect = .init(origin: .zero, size: size) + roundedSquareShape.cornerRadius = cornerRadius + guard + let image = roundedSquareShape.makeCGImage() else { + return nil + } + imageBuffers[key] = try? vImage_Buffer(cgImage: image) + return imageBuffers[key] + } + + func removeAll() { + for buffer in imageBuffers.values { + buffer.free() + } + imageBuffers.removeAll() + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/StreamScreenObject.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/StreamScreenObject.swift new file mode 100644 index 000000000..47086ea88 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/StreamScreenObject.swift @@ -0,0 +1,102 @@ +import AVFoundation +import CoreGraphics +import CoreImage +import Foundation + +/// An object that manages offscreen rendering a streaming video track source. +/// +/// ## Usage +/// ```swift +/// var streamScreenObject = StreamScreenObject() +/// +/// Task { +/// // Register to the Stream's Output observer. +/// stream.addOutput(streamScreenObject) +/// stream.play("yourStreamName") +/// } +/// +/// Task { @ScreenActor in +/// streamScreenObject.layoutMargin = .init(top: 16, left: 0, bottom: 0, right: 16) +/// streamScreenObject.size = .init(width: 160 * 2, height: 90 * 2) +/// +/// try? await mixer.screen.addChild(streamScreenObject) +/// } +/// ``` +public final class StreamScreenObject: ScreenObject, ChromaKeyProcessable { + public var chromaKeyColor: CGColor? + + /// The video is displayed within a player layer’s bounds. 
+ public var videoGravity: AVLayerVideoGravity = .resizeAspect { + didSet { + guard videoGravity != oldValue else { + return + } + invalidateLayout() + } + } + + private var sampleBuffer: CMSampleBuffer? { + didSet { + guard sampleBuffer != oldValue else { + return + } + if sampleBuffer == nil { + return + } + invalidateLayout() + } + } + + override var blendMode: ScreenObject.BlendMode { + if 0.0 < cornerRadius || chromaKeyColor != nil { + return .alpha + } + return .normal + } + + override public func makeBounds(_ size: CGSize) -> CGRect { + guard parent != nil, let image = sampleBuffer?.formatDescription?.dimensions.size else { + return super.makeBounds(size) + } + let bounds = super.makeBounds(size) + switch videoGravity { + case .resizeAspect: + let scale = min(bounds.size.width / image.width, bounds.size.height / image.height) + let scaleSize = CGSize(width: image.width * scale, height: image.height * scale) + return super.makeBounds(scaleSize) + case .resizeAspectFill: + return bounds + default: + return bounds + } + } + + override public func makeImage(_ renderer: some ScreenRenderer) -> CGImage? { + guard let image: CIImage = makeImage(renderer) else { + return nil + } + return renderer.context.createCGImage(image, from: videoGravity.region(bounds, image: image.extent)) + } + + override public func makeImage(_ renderer: some ScreenRenderer) -> CIImage? 
{ + guard let sampleBuffer, let pixelBuffer = sampleBuffer.imageBuffer else { + return nil + } + return CIImage(cvPixelBuffer: pixelBuffer).transformed(by: videoGravity.scale( + bounds.size, + image: pixelBuffer.size + )) + } +} + +extension StreamScreenObject: StreamOutput { + // MARK: HKStreamOutput + nonisolated public func stream(_ stream: some StreamConvertible, didOutput audio: AVAudioBuffer, when: AVAudioTime) { + } + + nonisolated public func stream(_ stream: some StreamConvertible, didOutput video: CMSampleBuffer) { + Task { @ScreenActor in + self.sampleBuffer = video + } + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/TextScreenObject.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/TextScreenObject.swift new file mode 100644 index 000000000..5fcc60a84 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/TextScreenObject.swift @@ -0,0 +1,99 @@ +#if canImport(AppKit) +import AppKit +#endif + +#if canImport(UIKit) +import UIKit +#endif + +/// An object that manages offscreen rendering a text source. +public final class TextScreenObject: ScreenObject { + /// Specifies the text value. + public var string: String = "" { + didSet { + guard string != oldValue else { + return + } + invalidateLayout() + } + } + + #if os(macOS) + /// Specifies the attributes for strings. + public var attributes: [NSAttributedString.Key: Any]? = [ + .font: NSFont.boldSystemFont(ofSize: 32), + .foregroundColor: NSColor.white + ] { + didSet { + invalidateLayout() + } + } + #else + /// Specifies the attributes for strings. + public var attributes: [NSAttributedString.Key: Any]? 
= [ + .font: UIFont.boldSystemFont(ofSize: 32), + .foregroundColor: UIColor.white + ] { + didSet { + invalidateLayout() + } + } + #endif + + override public var bounds: CGRect { + didSet { + guard bounds != oldValue else { + return + } + context = CGContext( + data: nil, + width: Int(bounds.width), + height: Int(bounds.height), + bitsPerComponent: 8, + bytesPerRow: Int(bounds.width) * 4, + space: CGColorSpaceCreateDeviceRGB(), + bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.premultipliedFirst.rawValue).rawValue + ) + } + } + + private var context: CGContext? + private var framesetter: CTFramesetter? + + override public func makeBounds(_ size: CGSize) -> CGRect { + guard !string.isEmpty else { + self.framesetter = nil + return .zero + } + let bounds = super.makeBounds(size) + let attributedString = NSAttributedString(string: string, attributes: attributes) + let framesetter = CTFramesetterCreateWithAttributedString(attributedString) + let frameSize = CTFramesetterSuggestFrameSizeWithConstraints( + framesetter, + .init(), + nil, + bounds.size, + nil + ) + self.framesetter = framesetter + return super.makeBounds(frameSize) + } + + override public func makeImage(_ renderer: some ScreenRenderer) -> CGImage? { + guard let context, let framesetter else { + return nil + } + let path = CGPath(rect: .init(origin: .zero, size: bounds.size), transform: nil) + let frame = CTFramesetterCreateFrame(framesetter, .init(), path, nil) + context.clear(context.boundingBoxOfPath) + CTFrameDraw(frame, context) + return context.makeImage() + } + + override public func makeImage(_ renderer: some ScreenRenderer) -> CIImage? 
{ + guard let image: CGImage = makeImage(renderer) else { + return nil + } + return CIImage(cgImage: image) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/VideoEffect.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/VideoEffect.swift new file mode 100644 index 000000000..95e189659 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/VideoEffect.swift @@ -0,0 +1,27 @@ +import AVFoundation +import CoreImage +import Foundation + +/// An object that apply a video effect. +/// - seealso:[Processing an Image Using Built-in Filters](https://developer.apple.com/documentation/coreimage/processing_an_image_using_built-in_filters) +/// +/// ## Example code: +/// ```swift +/// final class MonochromeEffect: VideoEffect { +/// let filter: CIFilter? = CIFilter(name: "CIColorMonochrome") +/// +/// func execute(_ image: CIImage) -> CIImage { +/// guard let filter: CIFilter = filter else { +/// return image +/// } +/// filter.setValue(image, forKey: "inputImage") +/// filter.setValue(CIColor(red: 0.75, green: 0.75, blue: 0.75), forKey: "inputColor") +/// filter.setValue(1.0, forKey: "inputIntensity") +/// return filter.outputImage ?? image +/// } +/// } +/// ``` +public protocol VideoEffect: AnyObject { + /// Executes to apply a video effect. + func execute(_ image: CIImage) -> CIImage +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/VideoTrackScreenObject.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/VideoTrackScreenObject.swift new file mode 100644 index 000000000..4f95f056c --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Screen/VideoTrackScreenObject.swift @@ -0,0 +1,139 @@ +import AVFoundation +import CoreImage + +/// An object that manages offscreen rendering a video track source. +public final class VideoTrackScreenObject: ScreenObject, ChromaKeyProcessable { + static let capacity: Int = 3 + public var chromaKeyColor: CGColor? 
+ + /// Specifies the track number how the displays the visual content. + public var track: UInt8 = 0 { + didSet { + guard track != oldValue else { + return + } + invalidateLayout() + } + } + + /// A value that specifies how the video is displayed within a player layer’s bounds. + public var videoGravity: AVLayerVideoGravity = .resizeAspect { + didSet { + guard videoGravity != oldValue else { + return + } + invalidateLayout() + } + } + + /// The frame rate. + public var frameRate: Int { + frameTracker.frameRate + } + + override var blendMode: ScreenObject.BlendMode { + if 0.0 < cornerRadius || chromaKeyColor != nil { + return .alpha + } + return .normal + } + + private var queue: TypedBlockQueue? + private var effects: [any VideoEffect] = .init() + private var frameTracker = FrameTracker() + + /// Create a screen object. + override public init() { + super.init() + do { + queue = try TypedBlockQueue(capacity: Self.capacity, handlers: .outputPTSSortedSampleBuffers) + } catch { + logger.error(error) + } + Task { + horizontalAlignment = .center + } + } + + /// Registers a video effect. + public func registerVideoEffect(_ effect: some VideoEffect) -> Bool { + if effects.contains(where: { $0 === effect }) { + return false + } + effects.append(effect) + return true + } + + /// Unregisters a video effect. + public func unregisterVideoEffect(_ effect: some VideoEffect) -> Bool { + if let index = effects.firstIndex(where: { $0 === effect }) { + effects.remove(at: index) + return true + } + return false + } + + override public func makeImage(_ renderer: some ScreenRenderer) -> CGImage? { + guard let image: CIImage = makeImage(renderer) else { + return nil + } + return renderer.context.createCGImage(image, from: videoGravity.region(bounds, image: image.extent)) + } + + override public func makeImage(_ renderer: some ScreenRenderer) -> CIImage? 
{ + let presentationTimeStamp = renderer.presentationTimeStamp.convertTime(from: CMClockGetHostTimeClock(), to: renderer.synchronizationClock) + guard let sampleBuffer = queue?.dequeue(presentationTimeStamp), + let pixelBuffer = sampleBuffer.imageBuffer else { + return nil + } + frameTracker.update(sampleBuffer.presentationTimeStamp) + // Resizing before applying the filter for performance optimization. + var image = CIImage(cvPixelBuffer: pixelBuffer, options: renderer.imageOptions).transformed(by: videoGravity.scale( + bounds.size, + image: pixelBuffer.size + )) + if effects.isEmpty { + return image + } else { + for effect in effects { + image = effect.execute(image) + } + return image + } + } + + override public func makeBounds(_ size: CGSize) -> CGRect { + guard parent != nil, let image = queue?.head?.formatDescription?.dimensions.size else { + return super.makeBounds(size) + } + let bounds = super.makeBounds(size) + switch videoGravity { + case .resizeAspect: + let scale = min(bounds.size.width / image.width, bounds.size.height / image.height) + let scaleSize = CGSize(width: image.width * scale, height: image.height * scale) + return super.makeBounds(scaleSize) + case .resizeAspectFill: + return bounds + default: + return bounds + } + } + + override public func draw(_ renderer: some ScreenRenderer) { + super.draw(renderer) + if queue?.isEmpty == false { + invalidateLayout() + } + } + + func enqueue(_ sampleBuffer: CMSampleBuffer) { + try? queue?.enqueue(sampleBuffer) + invalidateLayout() + } + + func reset() { + frameTracker.clear() + try? 
queue?.reset() + invalidateLayout() + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/Session.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/Session.swift new file mode 100644 index 000000000..4e63ef4c0 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/Session.swift @@ -0,0 +1,45 @@ +import Foundation + +package let kSession_maxRetryCount: Int = 3 + +/// Represents the type of session to establish. +public enum SessionMode: Sendable { + /// A publishing session, used to stream media from the local device to a server or peers. + case publish + /// A playback session, used to receive and play media streamed from a server or peers. + case playback +} + +/// Represents the current connection state of a session. +public enum SessionReadyState: Int, Sendable { + /// The session is currently attempting to establish a connection. + case connecting + /// The session has been successfully established and is ready for communication. + case open + /// The session is in the process of closing the connection. + case closing + /// The session has been closed or could not be established. + case closed +} + +/// A type that represents a foundation of streaming session. +/// +/// It is designed so that various streaming services can be used through a common API. +/// While coding with the conventional Connection offered flexibility, +/// it also required a certain level of maturity in properly handling network communication. +public protocol Session: NetworkConnection { + /// The current ready state. + var readyState: AsyncStream { get } + + /// The stream instance. + var stream: any StreamConvertible { get async } + + /// Creates a new session with uri. + init(uri: URL, mode: SessionMode, configuration: (any SessionConfiguration)?) + + /// Sets a max retry count. + func setMaxRetryCount(_ maxRetryCount: Int) + + /// Creates a connection to the server. 
+ func connect(_ disconnected: @Sendable @escaping () -> Void) async throws +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/SessionBuilder.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/SessionBuilder.swift new file mode 100644 index 000000000..92dcd9e19 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/SessionBuilder.swift @@ -0,0 +1,31 @@ +import Foundation + +/// An actor that provides builder for Session object. +public actor SessionBuilder { + private let factory: SessionBuilderFactory + private let uri: URL + private var mode: SessionMode = .publish + private var configuration: (any SessionConfiguration)? + + init(factory: SessionBuilderFactory, uri: URL) { + self.factory = factory + self.uri = uri + } + + /// Sets a method. + public func setMode(_ mode: SessionMode) -> Self { + self.mode = mode + return self + } + + /// Sets a config. + public func setConfiguration(_ configuration: (any SessionConfiguration)?) -> Self { + self.configuration = configuration + return self + } + + /// Creates a Session instance with the specified fields. + public func build() async throws -> (any Session)? { + return try await factory.build(uri, method: mode, configuration: configuration) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/SessionBuilderFactory.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/SessionBuilderFactory.swift new file mode 100644 index 000000000..0ebf03fe2 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/SessionBuilderFactory.swift @@ -0,0 +1,56 @@ +import Foundation + +/// An actor that provides a factory to create a SessionBuifer. +/// +/// ## Prerequisites +/// You need to register the factory in advance as follows. 
+/// ```swift +/// import RTMPHaishinKit +/// import SRTHaishinKit +/// +/// await SessionBuilderFactory.shared.register(RTMPSessionFactory()) +/// await SessionBuilderFactory.shared.register(SRTSessionFactory()) +/// ``` +public actor SessionBuilderFactory { + /// The shared instance. + public static let shared = SessionBuilderFactory() + + /// The error domain codes. + public enum Error: Swift.Error { + /// An illegal argument. + case illegalArgument + /// The factory can't find a SessionBuilder. + case notFound + } + + private var factories: [any SessionFactory] = [] + + private init() { + } + + /// Makes a new session builder. + public func make(_ uri: URL?) throws -> SessionBuilder { + guard let uri else { + throw Error.illegalArgument + } + return SessionBuilder(factory: self, uri: uri) + } + + /// Registers a factory. + public func register(_ factory: some SessionFactory) { + guard !factories.contains(where: { $0.supportedProtocols == factory.supportedProtocols }) else { + return + } + factories.append(factory) + } + + func build(_ uri: URL?, method: SessionMode, configuration: (any SessionConfiguration)?) throws -> (any Session) { + guard let uri else { + throw Error.illegalArgument + } + for factory in factories where factory.supportedProtocols.contains(uri.scheme ?? 
"") { + return factory.make(uri, mode: method, configuration: configuration) + } + throw Error.notFound + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/SessionConfiguration.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/SessionConfiguration.swift new file mode 100644 index 000000000..91e902756 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/SessionConfiguration.swift @@ -0,0 +1,4 @@ +import Foundation + +public protocol SessionConfiguration: Encodable, Sendable { +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/SessionFactory.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/SessionFactory.swift new file mode 100644 index 000000000..a96277c2b --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Session/SessionFactory.swift @@ -0,0 +1,10 @@ +import Foundation + +/// A type that represents a streaming session factory. +public protocol SessionFactory { + /// The supported protocols. + var supportedProtocols: Set { get } + + /// Makes a new session by uri. + func make(_ uri: URL, mode: SessionMode, configuration: (any SessionConfiguration)?) -> any Session +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/AudioPlayer.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/AudioPlayer.swift new file mode 100644 index 000000000..4f75cbc19 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/AudioPlayer.swift @@ -0,0 +1,51 @@ +@preconcurrency import AVFoundation + +/// An object that provides the interface to control audio playback. +public final actor AudioPlayer { + private var connected: [AudioPlayerNode: Bool] = [:] + private var audioEngine: AVAudioEngine? + private var playerNodes: [AudioPlayerNode: AVAudioPlayerNode] = [:] + + /// Create an audio player object. 
+ public init(audioEngine: AVAudioEngine) { + self.audioEngine = audioEngine + } + + func isConnected(_ playerNode: AudioPlayerNode) -> Bool { + return connected[playerNode] == true + } + + func connect(_ playerNode: AudioPlayerNode, format: AVAudioFormat?) { + guard let audioEngine, let avPlayerNode = playerNodes[playerNode] else { + return + } + if let format { + audioEngine.connect(avPlayerNode, to: audioEngine.outputNode, format: format) + if !audioEngine.isRunning { + try? audioEngine.start() + } + connected[playerNode] = true + } else { + if audioEngine.isRunning { + audioEngine.stop() + } + audioEngine.disconnectNodeOutput(avPlayerNode) + connected[playerNode] = nil + } + } + + func detach(_ playerNode: AudioPlayerNode) { + if let playerNode = playerNodes[playerNode] { + audioEngine?.detach(playerNode) + } + playerNodes[playerNode] = nil + } + + func makePlayerNode() -> AudioPlayerNode { + let avAudioPlayerNode = AVAudioPlayerNode() + audioEngine?.attach(avAudioPlayerNode) + let playerNode = AudioPlayerNode(player: self, playerNode: avAudioPlayerNode) + playerNodes[playerNode] = avAudioPlayerNode + return playerNode + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/AudioPlayerNode.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/AudioPlayerNode.swift new file mode 100644 index 000000000..1b6cc92a4 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/AudioPlayerNode.swift @@ -0,0 +1,107 @@ +@preconcurrency import AVFoundation +import Foundation + +final actor AudioPlayerNode { + static let bufferCounts: Int = 10 + + var currentTime: TimeInterval { + if playerNode.isPlaying { + guard + let nodeTime = playerNode.lastRenderTime, + let playerTime = playerNode.playerTime(forNodeTime: nodeTime) else { + return 0.0 + } + return TimeInterval(playerTime.sampleTime) / playerTime.sampleRate + } + return 0.0 + } + private(set) var isPaused = false + private(set) var isRunning = false + private(set) var soundTransfrom = 
SoundTransform() + private let playerNode: AVAudioPlayerNode + private var audioTime = AudioTime() + private var scheduledAudioBuffers: Int = 0 + private var isBuffering = true + private weak var player: AudioPlayer? + private var format: AVAudioFormat? { + didSet { + guard format != oldValue else { + return + } + Task { [format] in + await player?.connect(self, format: format) + } + } + } + + init(player: AudioPlayer, playerNode: AVAudioPlayerNode) { + self.player = player + self.playerNode = playerNode + } + + func setSoundTransfrom(_ soundTransfrom: SoundTransform) { + soundTransfrom.apply(playerNode) + } + + func enqueue(_ audioBuffer: AVAudioBuffer, when: AVAudioTime) async { + format = audioBuffer.format + guard let audioBuffer = audioBuffer as? AVAudioPCMBuffer, await player?.isConnected(self) == true else { + return + } + if !audioTime.hasAnchor { + audioTime.anchor(playerNode.lastRenderTime ?? AVAudioTime(hostTime: 0)) + } + scheduledAudioBuffers += 1 + if !isPaused && !playerNode.isPlaying && Self.bufferCounts <= scheduledAudioBuffers { + playerNode.play() + } + Task { + audioTime.advanced(Int64(audioBuffer.frameLength)) + await playerNode.scheduleBuffer(audioBuffer, at: audioTime.at) + scheduledAudioBuffers -= 1 + if scheduledAudioBuffers == 0 { + isBuffering = true + } + } + } + + func detach() async { + stopRunning() + await player?.detach(self) + } +} + +extension AudioPlayerNode: AsyncRunner { + // MARK: AsyncRunner + func startRunning() { + guard !isRunning else { + return + } + scheduledAudioBuffers = 0 + isRunning = true + } + + func stopRunning() { + guard isRunning else { + return + } + if playerNode.isPlaying { + playerNode.stop() + playerNode.reset() + } + audioTime.reset() + format = nil + isRunning = false + } +} + +extension AudioPlayerNode: Hashable { + // MARK: Hashable + nonisolated public static func == (lhs: AudioPlayerNode, rhs: AudioPlayerNode) -> Bool { + lhs === rhs + } + + nonisolated public func hash(into hasher: inout Hasher) { 
+ hasher.combine(ObjectIdentifier(self)) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/IncomingStream.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/IncomingStream.swift new file mode 100644 index 000000000..c7e77114d --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/IncomingStream.swift @@ -0,0 +1,91 @@ +@preconcurrency import AVFoundation +import Foundation + +/// An actor that provides a stream playback feature. +package final actor IncomingStream { + public private(set) var isRunning = false + /// The sound transform value control. + public var soundTransfrom: SoundTransform? { + get async { + return await audioPlayerNode?.soundTransfrom + } + } + private lazy var mediaLink = MediaLink() + private lazy var audioCodec = AudioCodec() + private lazy var videoCodec = VideoCodec() + private weak var stream: (any StreamConvertible)? + private var audioPlayerNode: AudioPlayerNode? + + /// Creates a new instance. + public init(_ stream: some StreamConvertible) { + self.stream = stream + } + + /// Sets the sound transform value control. + public func setSoundTransform(_ soundTransfrom: SoundTransform) async { + await audioPlayerNode?.setSoundTransfrom(soundTransfrom) + } + + /// Appends a sample buffer for playback. + public func append(_ buffer: CMSampleBuffer) { + switch buffer.formatDescription?.mediaType { + case .audio: + audioCodec.append(buffer) + case .video: + videoCodec.append(buffer) + default: + break + } + } + + /// Appends an audio buffer for playback. + public func append(_ buffer: AVAudioBuffer, when: AVAudioTime) { + audioCodec.append(buffer, when: when) + } + + /// Attaches an audio player. + public func attachAudioPlayer(_ audioPlayer: AudioPlayer?) 
async { + await audioPlayerNode?.detach() + audioPlayerNode = await audioPlayer?.makePlayerNode() + await mediaLink.setAudioPlayer(audioPlayerNode) + } +} + +extension IncomingStream: AsyncRunner { + // MARK: AsyncRunner + public func startRunning() { + guard !isRunning else { + return + } + audioCodec.settings.format = .pcm + videoCodec.startRunning() + audioCodec.startRunning() + isRunning = true + // Deliver decoded video frames directly to the stream without MediaLink pacing. + // MediaLink uses a display-link choreographer that adds an extra buffering/pacing + // layer. For recvonly WebRTC streams feeding into an external buffered pipeline + // (like Moblin's BufferedVideo), this double-pacing causes frame drops and jitter. + Task { + for await video in videoCodec.outputStream { + await stream?.append(video) + } + } + Task { + await audioPlayerNode?.startRunning() + for await audio in audioCodec.outputStream { + await audioPlayerNode?.enqueue(audio.0, when: audio.1) + await stream?.append(audio.0, when: audio.1) + } + } + } + + public func stopRunning() { + guard isRunning else { + return + } + videoCodec.stopRunning() + audioCodec.stopRunning() + Task { await audioPlayerNode?.stopRunning() } + isRunning = false + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/MediaLink.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/MediaLink.swift new file mode 100644 index 000000000..4d83ace89 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/MediaLink.swift @@ -0,0 +1,104 @@ +import CoreMedia +import Foundation + +final actor MediaLink { + static let capacity = 90 + static let duration: TimeInterval = 0.0 + + var dequeue: AsyncStream { + AsyncStream { continutation in + self.continutation = continutation + } + } + private(set) var isRunning = false + private var storage: TypedBlockQueue? + private var continutation: AsyncStream.Continuation? 
{ + didSet { + oldValue?.finish() + } + } + private var duration: TimeInterval = MediaLink.duration + private var presentationTimeStampOrigin: CMTime = .invalid + private lazy var displayLink = DisplayLinkChoreographer() + private weak var audioPlayer: AudioPlayerNode? + + init() { + do { + storage = try .init(capacity: Self.capacity, handlers: .outputPTSSortedSampleBuffers) + } catch { + logger.error(error) + } + } + + func enqueue(_ sampleBuffer: CMSampleBuffer) { + guard isRunning else { + return + } + if presentationTimeStampOrigin == .invalid { + presentationTimeStampOrigin = sampleBuffer.presentationTimeStamp + } + do { + try storage?.enqueue(sampleBuffer) + } catch { + logger.error(error) + } + } + + func setAudioPlayer(_ audioPlayer: AudioPlayerNode?) { + self.audioPlayer = audioPlayer + } + + private func getCurrentTime(_ timestamp: TimeInterval) async -> TimeInterval { + defer { + duration += timestamp + } + return await audioPlayer?.currentTime ?? duration + } +} + +extension MediaLink: AsyncRunner { + // MARK: AsyncRunner + func startRunning() { + guard !isRunning else { + return + } + isRunning = true + duration = 0.0 + displayLink.startRunning() + Task { + for await currentTime in displayLink.updateFrames { + guard let storage else { + continue + } + let currentTime = await getCurrentTime(currentTime.targetTimestamp - currentTime.timestamp) + var frameCount = 0 + while !storage.isEmpty { + guard let first = storage.head else { + break + } + if first.presentationTimeStamp.seconds - presentationTimeStampOrigin.seconds <= currentTime { + continutation?.yield(first) + frameCount += 1 + _ = storage.dequeue() + } else { + if 2 < frameCount { + logger.info("droppedFrame: \(frameCount)") + } + break + } + } + } + } + } + + func stopRunning() { + guard isRunning else { + return + } + continutation = nil + displayLink.stopRunning() + presentationTimeStampOrigin = .invalid + try? 
storage?.reset() + isRunning = false + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/OutgoingStream.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/OutgoingStream.swift new file mode 100644 index 000000000..ec254cb31 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/OutgoingStream.swift @@ -0,0 +1,118 @@ +import AVFoundation +import Foundation + +/// An object that provides a stream ingest feature. +package final class OutgoingStream { + package private(set) var isRunning = false + + /// The asynchronous sequence for audio output. + package var audioOutputStream: AsyncStream<(AVAudioBuffer, AVAudioTime)> { + return audioCodec.outputStream + } + + /// Specifies the audio compression properties. + package var audioSettings: AudioCodecSettings { + get { + audioCodec.settings + } + set { + audioCodec.settings = newValue + } + } + + /// The audio input format. + package private(set) var audioInputFormat: CMFormatDescription? + + /// The asynchronous sequence for video output. + package var videoOutputStream: AsyncStream { + return videoCodec.outputStream + } + + /// Specifies the video compression properties. + package var videoSettings: VideoCodecSettings { + get { + videoCodec.settings + } + set { + videoCodec.settings = newValue + } + } + + /// Specifies the video buffering count. + package var videoInputBufferCounts = -1 + + /// The asynchronous sequence for video input buffer. + package var videoInputStream: AsyncStream { + if 0 < videoInputBufferCounts { + return AsyncStream(CMSampleBuffer.self, bufferingPolicy: .bufferingNewest(videoInputBufferCounts)) { continuation in + self.videoInputContinuation = continuation + } + } else { + return AsyncStream { continuation in + self.videoInputContinuation = continuation + } + } + } + + /// The video input format. + package private(set) var videoInputFormat: CMFormatDescription? 
+ + private var audioCodec = AudioCodec() + private var videoCodec = VideoCodec() + private var videoInputContinuation: AsyncStream.Continuation? { + didSet { + oldValue?.finish() + } + } + + /// Create a new instance. + package init() { + } + + /// Appends a sample buffer for publish. + package func append(_ sampleBuffer: CMSampleBuffer) { + switch sampleBuffer.formatDescription?.mediaType { + case .audio: + audioInputFormat = sampleBuffer.formatDescription + audioCodec.append(sampleBuffer) + case .video: + videoInputFormat = sampleBuffer.formatDescription + videoInputContinuation?.yield(sampleBuffer) + default: + break + } + } + + /// Appends a sample buffer for publish. + package func append(_ audioBuffer: AVAudioBuffer, when: AVAudioTime) { + audioInputFormat = audioBuffer.format.formatDescription + audioCodec.append(audioBuffer, when: when) + } + + /// Appends a video buffer. + package func append(video sampleBuffer: CMSampleBuffer) { + videoCodec.append(sampleBuffer) + } +} + +extension OutgoingStream: Runner { + // MARK: Runner + package func startRunning() { + guard !isRunning else { + return + } + videoCodec.startRunning() + audioCodec.startRunning() + isRunning = true + } + + package func stopRunning() { + guard isRunning else { + return + } + isRunning = false + videoCodec.stopRunning() + audioCodec.stopRunning() + videoInputContinuation = nil + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/SoundTransform.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/SoundTransform.swift new file mode 100644 index 000000000..b4ae79d4d --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/SoundTransform.swift @@ -0,0 +1,33 @@ +import AVFoundation +import Foundation + +/// A structure represents the volume value controller. +public struct SoundTransform: Equatable, Sendable { + /// The default volume. + public static let defaultVolume: Float = 1.0 + /// The default panning of the sound. 
+ public static let defaultPan: Float = 0 + + /// The volume, ranging from 0 (silent) to 1 (full volume) + public var volume = SoundTransform.defaultVolume + /// The panning of the sound + public var pan = SoundTransform.defaultPan + + /// Creates a new instance. + public init(volume: Float = SoundTransform.defaultVolume, pan: Float = SoundTransform.defaultPan) { + self.volume = volume + self.pan = pan + } + + func apply(_ playerNode: AVAudioPlayerNode?) { + playerNode?.volume = volume + playerNode?.pan = pan + } +} + +extension SoundTransform: CustomDebugStringConvertible { + // MARK: CustomDebugStringConvertible + public var debugDescription: String { + Mirror(reflecting: self).debugDescription + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamBitRateStrategy.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamBitRateStrategy.swift new file mode 100644 index 000000000..c54cea830 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamBitRateStrategy.swift @@ -0,0 +1,73 @@ +import Foundation + +/// A type with a network bitrate strategy representation. +public protocol StreamBitRateStrategy: Sendable { + /// The mamimum video bitRate. + var mamimumVideoBitRate: Int { get } + /// The mamimum audio bitRate. + var mamimumAudioBitRate: Int { get } + + /// Adjust a bitRate. + func adjustBitrate(_ event: NetworkMonitorEvent, stream: some StreamConvertible) async +} + +/// An actor provides an algorithm that focuses on video bitrate control. +public final actor StreamVideoAdaptiveBitRateStrategy: StreamBitRateStrategy { + /// The status counts threshold for restoring the status + public static let statusCountsThreshold: Int = 15 + + public let mamimumVideoBitRate: Int + public let mamimumAudioBitRate: Int = 0 + private var sufficientBWCounts: Int = 0 + private var zeroBytesOutPerSecondCounts: Int = 0 + + /// Creates a new instance. 
+ public init(mamimumVideoBitrate: Int) { + self.mamimumVideoBitRate = mamimumVideoBitrate + } + + public func adjustBitrate(_ event: NetworkMonitorEvent, stream: some StreamConvertible) async { + switch event { + case .status: + var videoSettings = await stream.videoSettings + if videoSettings.bitRate == mamimumVideoBitRate { + return + } + if Self.statusCountsThreshold <= sufficientBWCounts { + let incremental = mamimumVideoBitRate / 10 + videoSettings.bitRate = min(videoSettings.bitRate + incremental, mamimumVideoBitRate) + try? await stream.setVideoSettings(videoSettings) + sufficientBWCounts = 0 + } else { + sufficientBWCounts += 1 + } + case .publishInsufficientBWOccured(let report): + sufficientBWCounts = 0 + var videoSettings = await stream.videoSettings + let audioSettings = await stream.audioSettings + if 0 < report.currentBytesOutPerSecond { + let bitRate = Int(report.currentBytesOutPerSecond * 8) / (zeroBytesOutPerSecondCounts + 1) + videoSettings.bitRate = max(bitRate - audioSettings.bitRate, mamimumVideoBitRate / 10) + videoSettings.frameInterval = 0.0 + sufficientBWCounts = 0 + zeroBytesOutPerSecondCounts = 0 + } else { + switch zeroBytesOutPerSecondCounts { + case 2: + videoSettings.frameInterval = VideoCodecSettings.frameInterval10 + case 4: + videoSettings.frameInterval = VideoCodecSettings.frameInterval05 + default: + break + } + try? await stream.setVideoSettings(videoSettings) + zeroBytesOutPerSecondCounts += 1 + } + case .reset: + var videoSettings = await stream.videoSettings + zeroBytesOutPerSecondCounts = 0 + videoSettings.bitRate = mamimumVideoBitRate + try? 
await stream.setVideoSettings(videoSettings) + } + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamConvertible.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamConvertible.swift new file mode 100644 index 000000000..669543e83 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamConvertible.swift @@ -0,0 +1,106 @@ +import AVFAudio +import AVFoundation +import CoreImage +import CoreMedia + +/// The interface is the foundation of the RTMPStream and SRTStream. +public protocol StreamConvertible: Actor, MediaMixerOutput { + /// The current state of the stream. + var readyState: StreamReadyState { get } + /// The sound transform value control. + var soundTransform: SoundTransform? { get async } + /// The audio compression properties. + var audioSettings: AudioCodecSettings { get } + /// The video compression properties. + var videoSettings: VideoCodecSettings { get } + + /// Sets the bitrate strategy object. + func setBitRateStrategy(_ bitRateStrategy: (some StreamBitRateStrategy)?) + + /// Sets the audio compression properties. + func setAudioSettings(_ audioSettings: AudioCodecSettings) throws + + /// Sets the video compression properties. + func setVideoSettings(_ videoSettings: VideoCodecSettings) throws + + /// Sets the sound transform value control. + func setSoundTransform(_ soundTransfrom: SoundTransform) async + + /// Sets the video input buffer counts. + func setVideoInputBufferCounts(_ videoInputBufferCounts: Int) + + /// Appends a CMSampleBuffer. + /// - Parameters: + /// - sampleBuffer:The sample buffer to append. + func append(_ sampleBuffer: CMSampleBuffer) + + /// Appends an AVAudioBuffer. + /// - Parameters: + /// - audioBuffer:The audio buffer to append. + /// - when: The audio time to append. + func append(_ audioBuffer: AVAudioBuffer, when: AVAudioTime) + + /// Attaches an audio player instance for playback. + func attachAudioPlayer(_ audioPlayer: AudioPlayer?) 
async + + /// Adds an output observer. + func addOutput(_ obserber: some StreamOutput) + + /// Removes an output observer. + func removeOutput(_ observer: some StreamOutput) + + /// Dispatch a network monitor event. + func dispatch(_ event: NetworkMonitorEvent) async +} + +package protocol _Stream: StreamConvertible { + var incoming: IncomingStream { get } + var outgoing: OutgoingStream { get } + var outputs: [any StreamOutput] { get set } + var bitRateStrategy: (any StreamBitRateStrategy)? { get set } +} + +extension _Stream { + public var soundTransform: SoundTransform? { + get async { + await incoming.soundTransfrom + } + } + + public var audioSettings: AudioCodecSettings { + outgoing.audioSettings + } + + public var videoSettings: VideoCodecSettings { + outgoing.videoSettings + } + + public func setBitRateStrategy(_ bitRateStrategy: (some StreamBitRateStrategy)?) { + self.bitRateStrategy = bitRateStrategy + } + + public func setVideoInputBufferCounts(_ videoInputBufferCounts: Int) { + outgoing.videoInputBufferCounts = videoInputBufferCounts + } + + public func setSoundTransform(_ soundTransform: SoundTransform) async { + await incoming.setSoundTransform(soundTransform) + } + + public func attachAudioPlayer(_ audioPlayer: AudioPlayer?) 
async { + await incoming.attachAudioPlayer(audioPlayer) + } + + public func addOutput(_ observer: some StreamOutput) { + guard !outputs.contains(where: { $0 === observer }) else { + return + } + outputs.append(observer) + } + + public func removeOutput(_ observer: some StreamOutput) { + if let index = outputs.firstIndex(where: { $0 === observer }) { + outputs.remove(at: index) + } + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamOutput.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamOutput.swift new file mode 100644 index 000000000..020fbab20 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamOutput.swift @@ -0,0 +1,11 @@ +import AVFoundation +import CoreMedia +import Foundation + +/// A delegate protocol your app implements to receive capture stream output events. +public protocol StreamOutput: AnyObject, Sendable { + /// Tells the receiver to an audio buffer outgoing. + func stream(_ stream: some StreamConvertible, didOutput audio: AVAudioBuffer, when: AVAudioTime) + /// Tells the receiver to a video buffer outgoing. + func stream(_ stream: some StreamConvertible, didOutput video: CMSampleBuffer) +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamReadyState.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamReadyState.swift new file mode 100644 index 000000000..9eafbcc63 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamReadyState.swift @@ -0,0 +1,15 @@ +import Foundation + +/// The enumeration defines the state a HKStream client is in. +public enum StreamReadyState: Int, Sendable { + /// The stream is idling. + case idle + /// The stream has sent a request to play and is waiting for approval from the server. + case play + /// The stream is playing. + case playing + /// The streamhas sent a request to publish and is waiting for approval from the server. + case publish + /// The stream is publishing. 
+ case publishing +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamRecorder.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamRecorder.swift new file mode 100644 index 000000000..f225924ce --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Stream/StreamRecorder.swift @@ -0,0 +1,382 @@ +@preconcurrency import AVFoundation + +// MARK: - +/// An actor represents video and audio recorder. +/// +/// This actor is compatible with both StreamOutput and MediaMixerOutput. This means it can record the output from MediaMixer in addition to StreamConvertible. +/// +/// ```swift +/// // An example of recording MediaMixer. +/// let recorder = StreamRecorder() +/// let mixer = MediaMixer() +/// mixer.addOutput(recorder) +/// ``` +/// ```swift +/// // An example of recording streaming. +/// let recorder = StreamRecorder() +/// let mixer = MediaMixer() +/// let stream = RTMPStream() +/// mixer.addOutput(stream) +/// stream.addOutput(recorder) +/// ``` +public actor StreamRecorder { + static let defaultPathExtension = "mp4" + + /// The error domain codes. + public enum Error: Swift.Error { + /// An invalid internal stare. + case invalidState + /// The specified file already exists. + case fileAlreadyExists(outputURL: URL) + /// The specifiled file type is not supported. + case notSupportedFileType(pathExtension: String) + /// Failed to create the AVAssetWriter. + case failedToCreateAssetWriter(error: any Swift.Error) + /// Failed to create the AVAssetWriterInput. + case failedToCreateAssetWriterInput(error: any Swift.Error) + /// Failed to append the PixelBuffer or SampleBuffer. + case failedToAppend(error: (any Swift.Error)?) + /// Failed to finish writing the AVAssetWriter. + case failedToFinishWriting(error: (any Swift.Error)?) + } + + /// The default recording settings. 
+ public static let defaultSettings: [AVMediaType: [String: any Sendable]] = [ + .audio: [ + AVFormatIDKey: Int(kAudioFormatMPEG4AAC), + AVSampleRateKey: 0, + AVNumberOfChannelsKey: 0 + ], + .video: [ + AVVideoCodecKey: AVVideoCodecType.h264, + AVVideoHeightKey: 0, + AVVideoWidthKey: 0 + ] + ] + + private static func isZero(_ value: any Sendable) -> Bool { + switch value { + case let value as Int: + return value == 0 + case let value as Double: + return value == 0 + default: + return false + } + } + + enum SupportedFileType: String { + case mp4 + case mov + + var fileType: AVFileType { + switch self { + case .mp4: + return .mp4 + case .mov: + return .mov + } + } + } + + /// The recorder settings. + public private(set) var settings: [AVMediaType: [String: any Sendable]] = StreamRecorder.defaultSettings + /// The recording output url. + public var outputURL: URL? { + return writer?.outputURL + } + /// The current error. + public var error: AsyncStream { + AsyncStream { continuation in + self.continuation = continuation + } + } + /// The recording or not. + public private(set) var isRecording = false + /// The the movie fragment interval in sec. + public private(set) var movieFragmentInterval: Double? + public private(set) var videoTrackId: UInt8? = UInt8.max + public private(set) var audioTrackId: UInt8? = UInt8.max + + #if os(macOS) && !targetEnvironment(macCatalyst) + /// The default file save location. + public private(set) var moviesDirectory: URL = { + URL(fileURLWithPath: NSSearchPathForDirectoriesInDomains(.moviesDirectory, .userDomainMask, true)[0]) + }() + #else + /// The default file save location. + public private(set) lazy var moviesDirectory: URL = { + URL(fileURLWithPath: NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]) + }() + #endif + + private var isReadyForStartWriting: Bool { + guard let writer = writer else { + return false + } + return settings.count == writer.inputs.count + } + private var writer: AVAssetWriter? 
+ private var continuation: AsyncStream.Continuation? { + didSet { + oldValue?.finish() + } + } + private var writerInputs: [AVMediaType: AVAssetWriterInput] = [:] + private var audioPresentationTime: CMTime = .zero + private var videoPresentationTime: CMTime = .zero + private var dimensions: CMVideoDimensions = .init(width: 0, height: 0) + + /// Creates a new recorder. + public init() { + } + + /// Sets the movie fragment interval in sec. + /// + /// This value allows the file to be written continuously, so the file will remain even if the app crashes or is forcefully terminated. A value of 10 seconds or more is recommended. + /// - seealso: https://developer.apple.com/documentation/avfoundation/avassetwriter/1387469-moviefragmentinterval + public func setMovieFragmentInterval(_ movieFragmentInterval: Double?) { + if let movieFragmentInterval { + self.movieFragmentInterval = max(10.0, movieFragmentInterval) + } else { + self.movieFragmentInterval = nil + } + } + + /// Starts recording. + /// + /// For iOS, if the URL is unspecified, the file will be saved in .documentDirectory. You can specify a folder of your choice, but please use an absolute path. + /// + /// ``` + /// try? await recorder.startRecording(nil) + /// // -> $documentDirectory/B644F60F-0959-4F54-9D14-7F9949E02AD8.mp4 + /// + /// try? await recorder.startRecording(URL(string: "dir/sample.mp4")) + /// // -> $documentDirectory/dir/sample.mp4 + /// + /// try? await recorder.startRecording(await recorder.moviesDirectory.appendingPathComponent("sample.mp4")) + /// // -> $documentDirectory/sample.mp4 + /// + /// try? await recorder.startRecording(URL(string: "dir")) + /// // -> $documentDirectory/dir/33FA7D32-E0A8-4E2C-9980-B54B60654044.mp4 + /// ``` + /// + /// - Note: Folders are not created automatically, so it’s expected that the target directory is created in advance. + /// - Parameters: + /// - url: The file path for recording. If nil is specified, a unique file path will be returned automatically. 
+ /// - settings: Settings for recording. + /// - Throws: `Error.fileAlreadyExists` when case file already exists. + /// - Throws: `Error.notSupportedFileType` when case species not supported format. + public func startRecording(_ url: URL? = nil, settings: [AVMediaType: [String: any Sendable]] = StreamRecorder.defaultSettings) async throws { + guard !isRecording else { + throw Error.invalidState + } + + let outputURL = makeOutputURL(url) + if FileManager.default.fileExists(atPath: outputURL.path) { + throw Error.fileAlreadyExists(outputURL: outputURL) + } + + var fileType: AVFileType = .mp4 + if let supportedFileType = SupportedFileType(rawValue: outputURL.pathExtension) { + fileType = supportedFileType.fileType + } else { + throw Error.notSupportedFileType(pathExtension: outputURL.pathExtension) + } + + writer = try AVAssetWriter(outputURL: outputURL, fileType: fileType) + if let movieFragmentInterval { + writer?.movieFragmentInterval = CMTime(seconds: movieFragmentInterval, preferredTimescale: 1) + } + videoPresentationTime = .zero + audioPresentationTime = .zero + self.settings = settings + + isRecording = true + } + + /// Stops recording. + /// + /// ## Example of saving to the Photos app. + /// ``` + /// do { + /// let outputURL = try await recorder.stopRecording() + /// PHPhotoLibrary.shared().performChanges({() -> Void in + /// PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL) + /// }, completionHandler: { _, error -> Void in + /// try? 
FileManager.default.removeItem(at: outputURL) + /// } + /// } catch { + /// print(error) + /// } + /// ``` + public func stopRecording() async throws -> URL { + guard isRecording else { + throw Error.invalidState + } + defer { + isRecording = false + continuation = nil + self.writer = nil + self.writerInputs.removeAll() + } + guard let writer = writer, writer.status == .writing else { + throw Error.failedToFinishWriting(error: writer?.error) + } + for (_, input) in writerInputs { + input.markAsFinished() + } + await writer.finishWriting() + return writer.outputURL + } + + public func selectTrack(_ id: UInt8?, mediaType: CMFormatDescription.MediaType) { + switch mediaType { + case .audio: + audioTrackId = id + case .video: + videoTrackId = id + default: + break + } + } + + private func makeOutputURL(_ url: URL?) -> URL { + guard let url else { + return moviesDirectory.appendingPathComponent(UUID().uuidString).appendingPathExtension(Self.defaultPathExtension) + } + // AVAssetWriter requires a isFileURL condition. + guard url.isFileURL else { + return url.pathExtension.isEmpty ? + moviesDirectory.appendingPathComponent(url.path).appendingPathComponent(UUID().uuidString).appendingPathExtension(Self.defaultPathExtension) : + moviesDirectory.appendingPathComponent(url.path) + } + return url.pathExtension.isEmpty ? url.appendingPathComponent(UUID().uuidString).appendingPathExtension(Self.defaultPathExtension) : url + } + + private func append(_ sampleBuffer: CMSampleBuffer) { + guard isRecording else { + return + } + let mediaType: AVMediaType = (sampleBuffer.formatDescription?.mediaType == .video) ? 
.video : .audio + guard + let writer, + let input = makeWriterInput(mediaType, sourceFormatHint: sampleBuffer.formatDescription), + isReadyForStartWriting else { + return + } + + switch writer.status { + case .unknown: + writer.startWriting() + writer.startSession(atSourceTime: sampleBuffer.presentationTimeStamp) + default: + break + } + + if input.isReadyForMoreMediaData { + switch mediaType { + case .audio: + if input.append(sampleBuffer) { + audioPresentationTime = sampleBuffer.presentationTimeStamp + } else { + continuation?.yield(Error.failedToAppend(error: writer.error)) + } + case .video: + if input.append(sampleBuffer) { + videoPresentationTime = sampleBuffer.presentationTimeStamp + } else { + continuation?.yield(Error.failedToAppend(error: writer.error)) + } + default: + break + } + } + } + + private func makeWriterInput(_ mediaType: AVMediaType, sourceFormatHint: CMFormatDescription?) -> AVAssetWriterInput? { + guard writerInputs[mediaType] == nil else { + return writerInputs[mediaType] + } + + var outputSettings: [String: Any] = [:] + if let settings = self.settings[mediaType] { + switch mediaType { + case .audio: + guard + let format = sourceFormatHint, + let inSourceFormat = format.audioStreamBasicDescription else { + break + } + for (key, value) in settings { + switch key { + case AVSampleRateKey: + outputSettings[key] = Self.isZero(value) ? inSourceFormat.mSampleRate : value + case AVNumberOfChannelsKey: + outputSettings[key] = Self.isZero(value) ? Int(inSourceFormat.mChannelsPerFrame) : value + default: + outputSettings[key] = value + } + } + case .video: + dimensions = sourceFormatHint?.dimensions ?? .init(width: 0, height: 0) + for (key, value) in settings { + switch key { + case AVVideoHeightKey: + outputSettings[key] = Self.isZero(value) ? Int(dimensions.height) : value + case AVVideoWidthKey: + outputSettings[key] = Self.isZero(value) ? 
Int(dimensions.width) : value + default: + outputSettings[key] = value + } + } + default: + break + } + } + + var input: AVAssetWriterInput? + if writer?.canApply(outputSettings: outputSettings, forMediaType: mediaType) == true { + input = AVAssetWriterInput(mediaType: mediaType, outputSettings: outputSettings, sourceFormatHint: sourceFormatHint) + input?.expectsMediaDataInRealTime = true + self.writerInputs[mediaType] = input + if let input { + self.writer?.add(input) + } + } + + return input + } +} + +extension StreamRecorder: StreamOutput { + // MARK: HKStreamOutput + nonisolated public func stream(_ stream: some StreamConvertible, didOutput video: CMSampleBuffer) { + Task { await append(video) } + } + + nonisolated public func stream(_ stream: some StreamConvertible, didOutput audio: AVAudioBuffer, when: AVAudioTime) { + guard let sampleBuffer = (audio as? AVAudioPCMBuffer)?.makeSampleBuffer(when) else { + return + } + Task { await append(sampleBuffer) } + } +} + +extension StreamRecorder: MediaMixerOutput { + // MARK: MediaMixerOutput + nonisolated public func mixer(_ mixer: MediaMixer, didOutput sampleBuffer: CMSampleBuffer) { + Task { + await append(sampleBuffer) + } + } + + nonisolated public func mixer(_ mixer: MediaMixer, didOutput buffer: AVAudioPCMBuffer, when: AVAudioTime) { + guard let sampleBuffer = buffer.makeSampleBuffer(when) else { + return + } + Task { + await append(sampleBuffer) + } + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/AVAudioUtil.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/AVAudioUtil.swift new file mode 100644 index 000000000..c1fadcf1c --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/AVAudioUtil.swift @@ -0,0 +1,52 @@ +import AVFAudio +import Foundation + +enum AVAudioUtil { + static func makeAudioFormat(_ formatDescription: CMFormatDescription?) -> AVAudioFormat? 
{ + guard var inSourceFormat = formatDescription?.audioStreamBasicDescription else { + return nil + } + if inSourceFormat.mFormatID == kAudioFormatLinearPCM && kLinearPCMFormatFlagIsBigEndian == (inSourceFormat.mFormatFlags & kLinearPCMFormatFlagIsBigEndian) { + let interleaved = !((inSourceFormat.mFormatFlags & kLinearPCMFormatFlagIsNonInterleaved) == kLinearPCMFormatFlagIsNonInterleaved) + if let channelLayout = Self.makeChannelLayout(inSourceFormat.mChannelsPerFrame) { + return .init( + commonFormat: .pcmFormatInt16, + sampleRate: inSourceFormat.mSampleRate, + interleaved: interleaved, + channelLayout: channelLayout + ) + } + return .init( + commonFormat: .pcmFormatInt16, + sampleRate: inSourceFormat.mSampleRate, + channels: inSourceFormat.mChannelsPerFrame, + interleaved: interleaved + ) + } + if let layout = Self.makeChannelLayout(inSourceFormat.mChannelsPerFrame) { + return .init(streamDescription: &inSourceFormat, channelLayout: layout) + } + return .init(streamDescription: &inSourceFormat) + } + + static func makeChannelLayout(_ numberOfChannels: UInt32) -> AVAudioChannelLayout? 
{ + guard 2 < numberOfChannels else { + return nil + } + switch numberOfChannels { + case 3: + // https://github.com/shogo4405/HaishinKit.swift/issues/1444 + return AVAudioChannelLayout(layoutTag: kAudioChannelLayoutTag_MPEG_3_0_B) + case 4: + return AVAudioChannelLayout(layoutTag: kAudioChannelLayoutTag_AudioUnit_4) + case 5: + return AVAudioChannelLayout(layoutTag: kAudioChannelLayoutTag_AudioUnit_5) + case 6: + return AVAudioChannelLayout(layoutTag: kAudioChannelLayoutTag_AudioUnit_6) + case 8: + return AVAudioChannelLayout(layoutTag: kAudioChannelLayoutTag_AudioUnit_8) + default: + return AVAudioChannelLayout(layoutTag: kAudioChannelLayoutTag_DiscreteInOrder | numberOfChannels) + } + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/AsyncStreamed.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/AsyncStreamed.swift new file mode 100644 index 000000000..7f8957a74 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/AsyncStreamed.swift @@ -0,0 +1,32 @@ +import Foundation + +@propertyWrapper +package struct AsyncStreamed { + package var wrappedValue: AsyncStream { + get { + defer { + continuation.yield(value) + } + return stream + } + @available(*, unavailable) + set { _ = newValue } + } + package var value: T { + didSet { + guard value != oldValue else { + return + } + continuation.yield(value) + } + } + private let stream: AsyncStream + private let continuation: AsyncStream.Continuation + + package init(_ value: T, bufferingPolicy limit: AsyncStream.Continuation.BufferingPolicy = .unbounded) { + let (stream, continuation) = AsyncStream.makeStream(of: T.self, bufferingPolicy: limit) + self.value = value + self.stream = stream + self.continuation = continuation + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/AsyncStreamedFlow.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/AsyncStreamedFlow.swift new file mode 100644 index 000000000..ee5a181a4 --- /dev/null +++ 
b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/AsyncStreamedFlow.swift @@ -0,0 +1,32 @@ +import Foundation + +@propertyWrapper +package struct AsyncStreamedFlow { + package var wrappedValue: AsyncStream { + mutating get { + let (stream, continuation) = AsyncStream.makeStream(of: T.self, bufferingPolicy: bufferingPolicy) + self.continuation = continuation + return stream + } + @available(*, unavailable) + set { _ = newValue } + } + private let bufferingPolicy: AsyncStream.Continuation.BufferingPolicy + private var continuation: AsyncStream.Continuation? { + didSet { + oldValue?.finish() + } + } + + package init(_ bufferingPolicy: AsyncStream.Continuation.BufferingPolicy = .unbounded) { + self.bufferingPolicy = bufferingPolicy + } + + package func yield(_ value: T) { + continuation?.yield(value) + } + + package mutating func finish() { + continuation = nil + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/AudioTime.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/AudioTime.swift new file mode 100644 index 000000000..8c8e8300c --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/AudioTime.swift @@ -0,0 +1,55 @@ +import AVFoundation +import Foundation + +/// A helper class for interoperating between AVAudioTime and CMTime. +/// Conversion fails without hostTime on the AVAudioTime side, and cannot be saved with AVAssetWriter. +final class AudioTime { + var at: AVAudioTime { + let now = AVAudioTime(sampleTime: sampleTime, atRate: sampleRate) + guard let anchorTime else { + return now + } + return now.extrapolateTime(fromAnchor: anchorTime) ?? now + } + + var hasAnchor: Bool { + return anchorTime != nil + } + + private var sampleRate: Double = 0.0 + private var anchorTime: AVAudioTime? 
+ private var sampleTime: AVAudioFramePosition = 0 + + func advanced(_ count: AVAudioFramePosition) { + sampleTime += count + } + + func anchor(_ time: CMTime, sampleRate: Double) { + guard anchorTime == nil else { + return + } + self.sampleRate = sampleRate + if time.timescale == Int32(sampleRate) { + sampleTime = time.value + } else { + // ReplayKit .appAudio + sampleTime = Int64(Double(time.value) * sampleRate / Double(time.timescale)) + } + anchorTime = .init(hostTime: AVAudioTime.hostTime(forSeconds: time.seconds), sampleTime: sampleTime, atRate: sampleRate) + } + + func anchor(_ time: AVAudioTime) { + guard anchorTime == nil else { + return + } + sampleRate = time.sampleRate + sampleTime = time.sampleTime + anchorTime = time + } + + func reset() { + sampleRate = 0.0 + sampleTime = 0 + anchorTime = nil + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/ByteArray.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/ByteArray.swift new file mode 100644 index 000000000..a6aa59403 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/ByteArray.swift @@ -0,0 +1,398 @@ +import Foundation + +protocol ByteArrayConvertible { + var data: Data { get } + var length: Int { get set } + var position: Int { get set } + var bytesAvailable: Int { get } + + subscript(i: Int) -> UInt8 { get set } + + @discardableResult + func writeUInt8(_ value: UInt8) -> Self + func readUInt8() throws -> UInt8 + + @discardableResult + func writeInt8(_ value: Int8) -> Self + func readInt8() throws -> Int8 + + @discardableResult + func writeUInt16(_ value: UInt16) -> Self + func readUInt16() throws -> UInt16 + + @discardableResult + func writeInt16(_ value: Int16) -> Self + func readInt16() throws -> Int16 + + @discardableResult + func writeUInt24(_ value: UInt32) -> Self + func readUInt24() throws -> UInt32 + + @discardableResult + func writeUInt32(_ value: UInt32) -> Self + func readUInt32() throws -> UInt32 + + @discardableResult + func writeInt32(_ value: 
Int32) -> Self + func readInt32() throws -> Int32 + + @discardableResult + func writeUInt64(_ value: UInt64) -> Self + func readUInt64() throws -> UInt64 + + @discardableResult + func writeInt64(_ value: Int64) -> Self + func readInt64() throws -> Int64 + + @discardableResult + func writeDouble(_ value: Double) -> Self + func readDouble() throws -> Double + + @discardableResult + func writeFloat(_ value: Float) -> Self + func readFloat() throws -> Float + + @discardableResult + func writeUTF8(_ value: String) throws -> Self + func readUTF8() throws -> String + + @discardableResult + func writeUTF8Bytes(_ value: String) -> Self + func readUTF8Bytes(_ length: Int) throws -> String + + @discardableResult + func writeBytes(_ value: Data) -> Self + func readBytes(_ length: Int) throws -> Data + + @discardableResult + func clear() -> Self +} + +// MARK: - +/** + * The ByteArray class provides methods and properties the reading or writing with binary data. + */ +class ByteArray: ByteArrayConvertible { + static let fillZero: [UInt8] = [0x00] + + static let sizeOfInt8: Int = 1 + static let sizeOfInt16: Int = 2 + static let sizeOfInt24: Int = 3 + static let sizeOfInt32: Int = 4 + static let sizeOfFloat: Int = 4 + static let sizeOfInt64: Int = 8 + static let sizeOfDouble: Int = 8 + + /** + * The ByteArray error domain codes. + */ + enum Error: Swift.Error { + /// Error cause end of data. + case eof + /// Failed to parse + case parse + } + + /// Creates an empty ByteArray. + init() { + } + + /// Creates a ByteArray with data. + init(data: Data) { + self.data = data + } + + private(set) var data = Data() + + /// Specifies the length of buffer. + var length: Int { + get { + data.count + } + set { + switch true { + case (data.count < newValue): + data.append(Data(count: newValue - data.count)) + case (newValue < data.count): + data = data.subdata(in: 0.. UInt8 { + get { + data[i] + } + set { + data[i] = newValue + } + } + + /// Reading an UInt8 value. 
+ func readUInt8() throws -> UInt8 { + guard ByteArray.sizeOfInt8 <= bytesAvailable else { + throw ByteArray.Error.eof + } + defer { + position += 1 + } + return data[position] + } + + /// Writing an UInt8 value. + @discardableResult + func writeUInt8(_ value: UInt8) -> Self { + writeBytes(value.data) + } + + /// Readning an Int8 value. + func readInt8() throws -> Int8 { + guard ByteArray.sizeOfInt8 <= bytesAvailable else { + throw ByteArray.Error.eof + } + defer { + position += 1 + } + return Int8(bitPattern: UInt8(data[position])) + } + + /// Writing an Int8 value. + @discardableResult + func writeInt8(_ value: Int8) -> Self { + writeBytes(UInt8(bitPattern: value).data) + } + + /// Readning an UInt16 value. + func readUInt16() throws -> UInt16 { + guard ByteArray.sizeOfInt16 <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += ByteArray.sizeOfInt16 + return UInt16(data: data[position - ByteArray.sizeOfInt16.. Self { + writeBytes(value.bigEndian.data) + } + + /// Reading an Int16 value. + func readInt16() throws -> Int16 { + guard ByteArray.sizeOfInt16 <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += ByteArray.sizeOfInt16 + return Int16(data: data[position - ByteArray.sizeOfInt16.. Self { + writeBytes(value.bigEndian.data) + } + + /// Reading an UInt24 value. + func readUInt24() throws -> UInt32 { + guard ByteArray.sizeOfInt24 <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += ByteArray.sizeOfInt24 + return UInt32(data: ByteArray.fillZero + data[position - ByteArray.sizeOfInt24.. Self { + writeBytes(value.bigEndian.data.subdata(in: 1.. UInt32 { + guard ByteArray.sizeOfInt32 <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += ByteArray.sizeOfInt32 + return UInt32(data: data[position - ByteArray.sizeOfInt32.. Self { + writeBytes(value.bigEndian.data) + } + + /// Reading an Int32 value. 
+ func readInt32() throws -> Int32 { + guard ByteArray.sizeOfInt32 <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += ByteArray.sizeOfInt32 + return Int32(data: data[position - ByteArray.sizeOfInt32.. Self { + writeBytes(value.bigEndian.data) + } + + /// Writing an UInt64 value. + @discardableResult + func writeUInt64(_ value: UInt64) -> Self { + writeBytes(value.bigEndian.data) + } + + /// Reading an UInt64 value. + func readUInt64() throws -> UInt64 { + guard ByteArray.sizeOfInt64 <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += ByteArray.sizeOfInt64 + return UInt64(data: data[position - ByteArray.sizeOfInt64.. Self { + writeBytes(value.bigEndian.data) + } + + /// Reading an Int64 value. + func readInt64() throws -> Int64 { + guard ByteArray.sizeOfInt64 <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += ByteArray.sizeOfInt64 + return Int64(data: data[position - ByteArray.sizeOfInt64.. Double { + guard ByteArray.sizeOfDouble <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += ByteArray.sizeOfDouble + return Double(data: Data(data.subdata(in: position - ByteArray.sizeOfDouble.. Self { + writeBytes(Data(value.data.reversed())) + } + + /// Reading a Float value. + func readFloat() throws -> Float { + guard ByteArray.sizeOfFloat <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += ByteArray.sizeOfFloat + return Float(data: Data(data.subdata(in: position - ByteArray.sizeOfFloat.. Self { + writeBytes(Data(value.data.reversed())) + } + + /// Reading a string as UTF8 value. + func readUTF8() throws -> String { + try readUTF8Bytes(Int(try readUInt16())) + } + + /// Writing a string as UTF8 value. + @discardableResult + func writeUTF8(_ value: String) throws -> Self { + let utf8 = Data(value.utf8) + return writeUInt16(UInt16(utf8.count)).writeBytes(utf8) + } + + /// Clear the buffer. 
+ @discardableResult + func clear() -> Self { + position = 0 + data.removeAll() + return self + } + + func readUTF8Bytes(_ length: Int) throws -> String { + guard length <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += length + + guard let result = String(data: data.subdata(in: position - length.. Self { + writeBytes(Data(value.utf8)) + } + + func readBytes(_ length: Int) throws -> Data { + guard length <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += length + return data.subdata(in: position - length.. Self { + if position == data.count { + data.append(value) + position = data.count + return self + } + let length: Int = min(data.count, value.count) + data[position.. Void)) { + let r: Int = (data.count - position) % length + for index in stride(from: data.startIndex.advanced(by: position), to: data.endIndex.advanced(by: -r), by: length) { + lambda(ByteArray(data: data.subdata(in: index.. [UInt32] { + let size: Int = MemoryLayout.size + if (data.endIndex - position) % size != 0 { + return [] + } + var result: [UInt32] = [] + for index in stride(from: data.startIndex.advanced(by: position), to: data.endIndex, by: size) { + result.append(UInt32(data: data[index.. AVCaptureVideoOrientation? { + guard let device = notification.object as? UIDevice else { + return nil + } + return videoOrientation(by: device.orientation) + } + + /// Looks up the AVCaptureVideoOrientation by an UIDeviceOrientation. + public static func videoOrientation(by orientation: UIDeviceOrientation) -> AVCaptureVideoOrientation? { + switch orientation { + case .portrait: + return .portrait + case .portraitUpsideDown: + return .portraitUpsideDown + case .landscapeLeft: + return .landscapeRight + case .landscapeRight: + return .landscapeLeft + default: + return nil + } + } + + /// Looks up the AVCaptureVideoOrientation by an UIInterfaceOrientation. + public static func videoOrientation(by orientation: UIInterfaceOrientation) -> AVCaptureVideoOrientation? 
{ + switch orientation { + case .portrait: + return .portrait + case .portraitUpsideDown: + return .portraitUpsideDown + case .landscapeLeft: + return .landscapeLeft + case .landscapeRight: + return .landscapeRight + default: + return nil + } + } + + /// Device is connected a headphone or not. + public static func isHeadphoneConnected(_ ports: Set = [.headphones, .bluetoothLE, .bluetoothHFP, .bluetoothA2DP]) -> Bool { + let outputs = AVAudioSession.sharedInstance().currentRoute.outputs + for description in outputs where ports.contains(description.portType) { + return true + } + return false + } + + /// Device is disconnected a headphone or not. + public static func isHeadphoneDisconnected(_ notification: Notification, ports: Set = [.headphones, .bluetoothLE, .bluetoothHFP, .bluetoothA2DP]) -> Bool { + guard let previousRoute = notification.userInfo?[AVAudioSessionRouteChangePreviousRouteKey] as? AVAudioSessionRouteDescription else { + return false + } + var isHeadohoneConnected = false + for output in previousRoute.outputs where ports.contains(output.portType) { + isHeadohoneConnected = true + break + } + if !isHeadohoneConnected { + return false + } + return !DeviceUtil.isHeadphoneConnected(ports) + } +} +#endif diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/FrameTracker.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/FrameTracker.swift new file mode 100644 index 000000000..902eaacbf --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/FrameTracker.swift @@ -0,0 +1,26 @@ +import CoreMedia + +struct FrameTracker { + static let seconds = 1.0 + + private(set) var frameRate: Int = 0 + private var count = 0 + private var rotated: CMTime = .zero + + init() { + } + + mutating func update(_ time: CMTime) { + count += 1 + if Self.seconds <= (time - rotated).seconds { + rotated = time + frameRate = count + count = 0 + } + } + + mutating func clear() { + count = 0 + rotated = .zero + } +} diff --git 
a/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/Runner.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/Runner.swift new file mode 100644 index 000000000..f778c3e5b --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/Runner.swift @@ -0,0 +1,21 @@ +import Foundation + +/// A type that methods for running. +public protocol Runner: AnyObject { + /// Indicates whether the receiver is running. + var isRunning: Bool { get } + /// Tells the receiver to start running. + func startRunning() + /// Tells the receiver to stop running. + func stopRunning() +} + +/// A type that methods for running. +public protocol AsyncRunner: Actor { + /// Indicates whether the receiver is running. + var isRunning: Bool { get } + /// Tells the receiver to start running. + func startRunning() async + /// Tells the receiver to stop running. + func stopRunning() async +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/TypedBlockQueue.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/TypedBlockQueue.swift new file mode 100644 index 000000000..8744b8723 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/Util/TypedBlockQueue.swift @@ -0,0 +1,65 @@ +import CoreMedia +import Foundation + +final class TypedBlockQueue { + private let queue: CMBufferQueue + private let capacity: CMItemCount + + @inlinable @inline(__always) var head: T? { + guard let head = queue.head else { + return nil + } + return (head as! T) + } + + @inlinable @inline(__always) var isEmpty: Bool { + queue.isEmpty + } + + @inlinable @inline(__always) var duration: CMTime { + queue.duration + } + + init(capacity: CMItemCount, handlers: CMBufferQueue.Handlers) throws { + self.capacity = capacity + self.queue = try CMBufferQueue(capacity: capacity, handlers: handlers) + } + + @inlinable + @inline(__always) + func enqueue(_ buffer: T) throws { + try queue.enqueue(buffer) + } + + @inlinable + @inline(__always) + func dequeue() -> T? 
{ + guard let value = queue.dequeue() else { + return nil + } + return (value as! T) + } + + @inlinable + @inline(__always) + func reset() throws { + try queue.reset() + } +} + +extension TypedBlockQueue where T == CMSampleBuffer { + func dequeue(_ presentationTimeStamp: CMTime) -> CMSampleBuffer? { + var result: CMSampleBuffer? + while !queue.isEmpty { + guard let head else { + break + } + if head.presentationTimeStamp <= presentationTimeStamp { + result = dequeue() + } else { + return result + } + } + return result + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/View/MTHKView.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/View/MTHKView.swift new file mode 100644 index 000000000..66dfa89ae --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/View/MTHKView.swift @@ -0,0 +1,174 @@ +#if os(iOS) || os(tvOS) || os(macOS) + +import AVFoundation +import MetalKit + +/// A view that displays a video content of a NetStream object which uses Metal api. +public class MTHKView: MTKView { + /// Specifies how the video is displayed within a player layer’s bounds. + public var videoGravity: AVLayerVideoGravity = .resizeAspect + public var videoTrackId: UInt8? = UInt8.max + public var audioTrackId: UInt8? + private var displayImage: CIImage? + private lazy var commandQueue: (any MTLCommandQueue)? = { + return device?.makeCommandQueue() + }() + private var context: CIContext? + private var effects: [any VideoEffect] = .init() + + /// Initializes and returns a newly allocated view object with the specified frame rectangle. + public init(frame: CGRect) { + super.init(frame: frame, device: MTLCreateSystemDefaultDevice()) + awakeFromNib() + } + + /// Returns an object initialized from data in a given unarchiver. + public required init(coder aDecoder: NSCoder) { + super.init(coder: aDecoder) + self.device = MTLCreateSystemDefaultDevice() + } + + /// Prepares the receiver for service after it has been loaded from an Interface Builder archive, or nib file. 
+ override public func awakeFromNib() { + super.awakeFromNib() + Task { @MainActor in + framebufferOnly = false + enableSetNeedsDisplay = true + if let device { + context = CIContext(mtlDevice: device, options: [.cacheIntermediates: false, .name: "MTHKView"]) + } + } + } + + /// Redraws the view’s contents. + override public func draw(_ rect: CGRect) { + guard + let context, + let currentDrawable = currentDrawable, + let commandBuffer = commandQueue?.makeCommandBuffer() else { + return + } + if + let currentRenderPassDescriptor = currentRenderPassDescriptor, + let renderCommandEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: currentRenderPassDescriptor) { + renderCommandEncoder.endEncoding() + } + guard let displayImage else { + commandBuffer.present(currentDrawable) + commandBuffer.commit() + return + } + + var scaleX: CGFloat = 0 + var scaleY: CGFloat = 0 + var translationX: CGFloat = 0 + var translationY: CGFloat = 0 + switch videoGravity { + case .resize: + scaleX = drawableSize.width / displayImage.extent.width + scaleY = drawableSize.height / displayImage.extent.height + case .resizeAspect: + let scale: CGFloat = min(drawableSize.width / displayImage.extent.width, drawableSize.height / displayImage.extent.height) + scaleX = scale + scaleY = scale + translationX = (drawableSize.width - displayImage.extent.width * scale) / scaleX / 2 + translationY = (drawableSize.height - displayImage.extent.height * scale) / scaleY / 2 + case .resizeAspectFill: + let scale: CGFloat = max(drawableSize.width / displayImage.extent.width, drawableSize.height / displayImage.extent.height) + scaleX = scale + scaleY = scale + translationX = (drawableSize.width - displayImage.extent.width * scale) / scaleX / 2 + translationY = (drawableSize.height - displayImage.extent.height * scale) / scaleY / 2 + default: + break + } + + var scaledImage: CIImage = displayImage + for effect in effects { + scaledImage = effect.execute(scaledImage) + } + + scaledImage = scaledImage + 
.transformed(by: CGAffineTransform(translationX: translationX, y: translationY)) + .transformed(by: CGAffineTransform(scaleX: scaleX, y: scaleY)) + + let destination = CIRenderDestination( + width: Int(drawableSize.width), + height: Int(drawableSize.height), + pixelFormat: colorPixelFormat, + commandBuffer: commandBuffer, + mtlTextureProvider: { () -> (any MTLTexture) in + return currentDrawable.texture + }) + + _ = try? context.startTask(toRender: scaledImage, to: destination) + + commandBuffer.present(currentDrawable) + commandBuffer.commit() + } + + /// Registers a video effect. + public func registerVideoEffect(_ effect: some VideoEffect) -> Bool { + if effects.contains(where: { $0 === effect }) { + return false + } + effects.append(effect) + return true + } + + /// Unregisters a video effect. + public func unregisterVideoEffect(_ effect: some VideoEffect) -> Bool { + if let index = effects.firstIndex(where: { $0 === effect }) { + effects.remove(at: index) + return true + } + return false + } +} + +extension MTHKView: MediaMixerOutput { + // MARK: MediaMixerOutput + public func selectTrack(_ id: UInt8?, mediaType: CMFormatDescription.MediaType) async { + switch mediaType { + case .audio: + break + case .video: + videoTrackId = id + default: + break + } + } + + nonisolated public func mixer(_ mixer: MediaMixer, didOutput buffer: AVAudioPCMBuffer, when: AVAudioTime) { + } + + nonisolated public func mixer(_ mixer: MediaMixer, didOutput sampleBuffer: CMSampleBuffer) { + Task { @MainActor in + displayImage = try? 
sampleBuffer.imageBuffer?.makeCIImage() + #if os(macOS) + self.needsDisplay = true + #else + self.setNeedsDisplay() + #endif + } + } +} + +extension MTHKView: StreamOutput { + // MARK: HKStreamOutput + nonisolated public func stream(_ stream: some StreamConvertible, didOutput audio: AVAudioBuffer, when: AVAudioTime) { + } + + nonisolated public func stream(_ stream: some StreamConvertible, didOutput video: CMSampleBuffer) { + Task { @MainActor in + displayImage = try? video.imageBuffer?.makeCIImage() + #if os(macOS) + self.needsDisplay = true + #else + self.setNeedsDisplay() + #endif + } + } +} + +#endif diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/View/MTHKViewRepresentable.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/View/MTHKViewRepresentable.swift new file mode 100644 index 000000000..307225a0a --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/View/MTHKViewRepresentable.swift @@ -0,0 +1,89 @@ +#if os(iOS) || os(tvOS) +import AVFoundation +import SwiftUI + +/// A SwiftUI view that displays using a `MTHKView`. +public struct MTHKViewRepresentable: UIViewRepresentable { + /// A type that presents the captured content. + public protocol PreviewSource { + func connect(to view: MTHKView) + } + + public typealias UIViewType = MTHKView + + /// Specifies the preview source. + public let previewSource: any PreviewSource + /// Specifies the videoGravity for MTHKView. + public var videoGravity: AVLayerVideoGravity = .resizeAspect + + private var view = MTHKView(frame: .zero) + + /// Creates a view representable. + public init(previewSource: some PreviewSource, videoGravity: AVLayerVideoGravity = .resizeAspect) { + self.previewSource = previewSource + self.videoGravity = videoGravity + } + + /// Selects track id for streaming. + public func track(_ id: UInt8?) 
-> Self { + Task { @MainActor in + await view.selectTrack(id, mediaType: .video) + } + return self + } + + public func makeUIView(context: Context) -> MTHKView { + previewSource.connect(to: view) + return view + } + + public func updateUIView(_ uiView: MTHKView, context: Context) { + uiView.videoGravity = videoGravity + } +} + +#elseif os(macOS) +import AVFoundation +import SwiftUI + +/// A SwiftUI view that displays using a `MTHKView`. +public struct MTHKViewRepresentable: NSViewRepresentable { + /// A type that presents the captured content. + public protocol PreviewSource { + func connect(to view: MTHKView) + } + + public typealias NSViewType = MTHKView + + /// Specifies the preview source. + public let previewSource: any PreviewSource + /// Specifies the videoGravity for MTHKView. + public var videoGravity: AVLayerVideoGravity = .resizeAspect + + private var view = MTHKView(frame: .zero) + + /// Creates a view representable. + public init(previewSource: some PreviewSource, videoGravity: AVLayerVideoGravity = .resizeAspect) { + self.previewSource = previewSource + self.videoGravity = videoGravity + } + + /// Selects track id for streaming. + public func track(_ id: UInt8?) 
-> Self { + Task { @MainActor in + await view.selectTrack(id, mediaType: .video) + } + return self + } + + public func makeNSView(context: Context) -> MTHKView { + previewSource.connect(to: view) + return view + } + + public func updateNSView(_ nsView: MTHKView, context: Context) { + nsView.videoGravity = videoGravity + } +} + +#endif diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/View/PiPHKView.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/View/PiPHKView.swift new file mode 100644 index 000000000..a94eb108d --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/View/PiPHKView.swift @@ -0,0 +1,143 @@ +#if os(iOS) || os(tvOS) || os(visionOS) +import AVFoundation +import Foundation +import UIKit + +/// A view that displays a video content of a NetStream object which uses AVSampleBufferDisplayLayer api. +public class PiPHKView: UIView { + /// The view’s background color. + public static var defaultBackgroundColor: UIColor = .black + + /// Returns the class used to create the layer for instances of this class. + override public class var layerClass: AnyClass { + AVSampleBufferDisplayLayer.self + } + + /// The view’s Core Animation layer used for rendering. + override public var layer: AVSampleBufferDisplayLayer { + super.layer as! AVSampleBufferDisplayLayer + } + + public var videoTrackId: UInt8? = UInt8.max + public var audioTrackId: UInt8? + + /// A value that specifies how the video is displayed within a player layer’s bounds. + public var videoGravity: AVLayerVideoGravity = .resizeAspect { + didSet { + layer.videoGravity = videoGravity + } + } + + /// Initializes and returns a newly allocated view object with the specified frame rectangle. + override public init(frame: CGRect) { + super.init(frame: frame) + awakeFromNib() + } + + /// Returns an object initialized from data in a given unarchiver. 
+ public required init?(coder aDecoder: NSCoder) { + super.init(coder: aDecoder) + } + + /// Prepares the receiver for service after it has been loaded from an Interface Builder archive, or nib file. + override public func awakeFromNib() { + super.awakeFromNib() + Task { @MainActor in + backgroundColor = Self.defaultBackgroundColor + layer.backgroundColor = Self.defaultBackgroundColor.cgColor + layer.videoGravity = videoGravity + } + } +} +#else + +import AppKit +import AVFoundation + +/// A view that displays a video content of a NetStream object which uses AVSampleBufferDisplayLayer api. +public class PiPHKView: NSView { + /// The view’s background color. + public static var defaultBackgroundColor: NSColor = .black + + /// A value that specifies how the video is displayed within a player layer’s bounds. + public var videoGravity: AVLayerVideoGravity = .resizeAspect { + didSet { + layer?.setValue(videoGravity, forKey: "videoGravity") + } + } + + /// Specifies how the video is displayed with in track. + public var videoTrackId: UInt8? = UInt8.max + public var audioTrackId: UInt8? + + /// Initializes and returns a newly allocated view object with the specified frame rectangle. + override public init(frame: CGRect) { + super.init(frame: frame) + awakeFromNib() + } + + /// Returns an object initialized from data in a given unarchiver. + public required init?(coder aDecoder: NSCoder) { + super.init(coder: aDecoder) + } + + /// Prepares the receiver for service after it has been loaded from an Interface Builder archive, or nib file. 
+ override public func awakeFromNib() { + super.awakeFromNib() + Task { @MainActor in + wantsLayer = true + layer = AVSampleBufferDisplayLayer() + layer?.backgroundColor = PiPHKView.defaultBackgroundColor.cgColor + layer?.setValue(videoGravity, forKey: "videoGravity") + } + } +} + +#endif + +extension PiPHKView: MediaMixerOutput { + // MARK: MediaMixerOutput + public func selectTrack(_ id: UInt8?, mediaType: CMFormatDescription.MediaType) async { + switch mediaType { + case .audio: + break + case .video: + videoTrackId = id + default: + break + } + } + + nonisolated public func mixer(_ mixer: MediaMixer, didOutput buffer: AVAudioPCMBuffer, when: AVAudioTime) { + } + + nonisolated public func mixer(_ mixer: MediaMixer, didOutput sampleBuffer: CMSampleBuffer) { + Task { @MainActor in + #if os(macOS) + (layer as? AVSampleBufferDisplayLayer)?.enqueue(sampleBuffer) + self.needsDisplay = true + #else + (layer as AVSampleBufferDisplayLayer).enqueue(sampleBuffer) + self.setNeedsDisplay() + #endif + } + } +} + +extension PiPHKView: StreamOutput { + // MARK: HKStreamOutput + nonisolated public func stream(_ stream: some StreamConvertible, didOutput audio: AVAudioBuffer, when: AVAudioTime) { + } + + nonisolated public func stream(_ stream: some StreamConvertible, didOutput video: CMSampleBuffer) { + Task { @MainActor in + #if os(macOS) + (layer as? 
AVSampleBufferDisplayLayer)?.enqueue(video) + self.needsDisplay = true + #else + (layer as AVSampleBufferDisplayLayer).enqueue(video) + self.setNeedsDisplay() + #endif + } + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Sources/View/PiPHKViewRepresentable.swift b/Vendor/HaishinKit.swift/HaishinKit/Sources/View/PiPHKViewRepresentable.swift new file mode 100644 index 000000000..5996d5089 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Sources/View/PiPHKViewRepresentable.swift @@ -0,0 +1,89 @@ +#if os(iOS) || os(tvOS) || os(visionOS) +import AVFoundation +import SwiftUI + +/// A SwiftUI view that displays using a `PiPHKView`. +public struct PiPHKViewRepresentable: UIViewRepresentable { + /// A type that presents the captured content. + public protocol PreviewSource { + func connect(to view: PiPHKView) + } + + public typealias UIViewType = PiPHKView + + /// Specifies the preview source. + public let previewSource: any PreviewSource + /// Specifies the videoGravity for PiPHKView. + public var videoGravity: AVLayerVideoGravity = .resizeAspect + + private var view = PiPHKView(frame: .zero) + + /// Creates a view representable. + public init(previewSource: any PreviewSource, videoGravity: AVLayerVideoGravity = .resizeAspect) { + self.previewSource = previewSource + self.videoGravity = videoGravity + } + + /// Selects track id for streaming. + public func track(_ id: UInt8?) -> Self { + Task { @MainActor in + await view.selectTrack(id, mediaType: .video) + } + return self + } + + public func makeUIView(context: Context) -> PiPHKView { + previewSource.connect(to: view) + return view + } + + public func updateUIView(_ uiView: PiPHKView, context: Context) { + uiView.videoGravity = videoGravity + } +} + +#else +import AVFoundation +import SwiftUI + +/// A SwiftUI view that displays using a `PiPHKView`. +public struct PiPHKViewRepresentable: NSViewRepresentable { + /// A type that presents the captured content. 
+ public protocol PreviewSource { + func connect(to view: PiPHKView) + } + + public typealias NSViewType = PiPHKView + + /// Specifies the preview source. + public let previewSource: any PreviewSource + /// Specifies the videoGravity for PiPHKView. + public var videoGravity: AVLayerVideoGravity = .resizeAspect + + private var view = PiPHKView(frame: .zero) + + /// Creates a view representable. + public init(previewSource: any PreviewSource, videoGravity: AVLayerVideoGravity = .resizeAspect) { + self.previewSource = previewSource + self.videoGravity = videoGravity + } + + /// Selects track id for streaming. + public func track(_ id: UInt8?) -> Self { + Task { @MainActor in + await view.selectTrack(id, mediaType: .video) + } + return self + } + + public func makeNSView(context: Context) -> PiPHKView { + previewSource.connect(to: view) + return view + } + + public func updateNSView(_ nsView: PiPHKView, context: Context) { + nsView.videoGravity = videoGravity + } +} + +#endif diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/AVAudioPCMBufferFactory.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/AVAudioPCMBufferFactory.swift new file mode 100644 index 000000000..f0b705bd7 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/AVAudioPCMBufferFactory.swift @@ -0,0 +1,56 @@ +import AVFoundation +@testable import HaishinKit + +enum AVAudioPCMBufferFactory { + static func makeSinWave(_ sampleRate: Double = 44100, numSamples: Int = 1024, channels: UInt32 = 1) -> AVAudioPCMBuffer? 
{ + var streamDescription = AudioStreamBasicDescription( + mSampleRate: sampleRate, + mFormatID: kAudioFormatLinearPCM, + mFormatFlags: 0xc, + mBytesPerPacket: 2 * channels, + mFramesPerPacket: 1, + mBytesPerFrame: 2 * channels, + mChannelsPerFrame: channels, + mBitsPerChannel: 16, + mReserved: 0 + ) + + guard let format = AVAudioFormat(streamDescription: &streamDescription, channelLayout: AVAudioUtil.makeChannelLayout(channels)) else { + return nil + } + + let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: AVAudioFrameCount(numSamples))! + buffer.frameLength = buffer.frameCapacity + + let channels = Int(format.channelCount) + let samples = buffer.int16ChannelData![0] + for n in 0.. CMSampleBuffer? { + var asbd = AudioStreamBasicDescription( + mSampleRate: sampleRate, + mFormatID: kAudioFormatLinearPCM, + mFormatFlags: 0xc, + mBytesPerPacket: 2 * channels, + mFramesPerPacket: 1, + mBytesPerFrame: 2 * channels, + mChannelsPerFrame: channels, + mBitsPerChannel: 16, + mReserved: 0 + ) + var formatDescription: CMAudioFormatDescription? + var status: OSStatus = noErr + var blockBuffer: CMBlockBuffer? + let blockSize = numSamples * Int(asbd.mBytesPerPacket) + status = CMBlockBufferCreateWithMemoryBlock( + allocator: nil, + memoryBlock: nil, + blockLength: blockSize, + blockAllocator: nil, + customBlockSource: nil, + offsetToData: 0, + dataLength: blockSize, + flags: 0, + blockBufferOut: &blockBuffer + ) + status = CMAudioFormatDescriptionCreate( + allocator: kCFAllocatorDefault, + asbd: &asbd, + layoutSize: 0, + layout: nil, + magicCookieSize: 0, + magicCookie: nil, + extensions: nil, + formatDescriptionOut: &formatDescription + ) + guard let blockBuffer, status == noErr else { + return nil + } + status = CMBlockBufferFillDataBytes( + with: 0, + blockBuffer: blockBuffer, + offsetIntoDestination: 0, + dataLength: blockSize + ) + guard status == noErr else { + return nil + } + var sampleBuffer: CMSampleBuffer? 
+ status = CMAudioSampleBufferCreateWithPacketDescriptions( + allocator: nil, + dataBuffer: blockBuffer, + dataReady: true, + makeDataReadyCallback: nil, + refcon: nil, + formatDescription: formatDescription!, + sampleCount: numSamples, + presentationTimeStamp: presentaionTimeStamp, + packetDescriptions: nil, + sampleBufferOut: &sampleBuffer + ) + guard let sampleBuffer, status == noErr else { + return nil + } + return sampleBuffer + } + + static func makeSinWave(_ sampleRate: Double = 44100, numSamples: Int = 1024, channels: UInt32 = 1) -> CMSampleBuffer? { + var status: OSStatus = noErr + var sampleBuffer: CMSampleBuffer? + var timing = CMSampleTimingInfo( + duration: CMTime(value: 1, timescale: Int32(sampleRate)), + presentationTimeStamp: CMTime.zero, + decodeTimeStamp: CMTime.invalid + ) + + var streamDescription = AudioStreamBasicDescription( + mSampleRate: sampleRate, + mFormatID: kAudioFormatLinearPCM, + mFormatFlags: 0xc, + mBytesPerPacket: 2 * channels, + mFramesPerPacket: 1, + mBytesPerFrame: 2 * channels, + mChannelsPerFrame: channels, + mBitsPerChannel: 16, + mReserved: 0 + ) + + guard let format = AVAudioFormat(streamDescription: &streamDescription, channelLayout: AVAudioUtil.makeChannelLayout(channels)) else { + return nil + } + + status = CMSampleBufferCreate( + allocator: kCFAllocatorDefault, + dataBuffer: nil, + dataReady: false, + makeDataReadyCallback: nil, + refcon: nil, + formatDescription: format.formatDescription, + sampleCount: numSamples, + sampleTimingEntryCount: 1, + sampleTimingArray: &timing, + sampleSizeEntryCount: 0, + sampleSizeArray: nil, + sampleBufferOut: &sampleBuffer + ) + + guard status == noErr else { + return nil + } + + let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: AVAudioFrameCount(numSamples))! + buffer.frameLength = buffer.frameCapacity + + let channels = Int(format.channelCount) + let samples = buffer.int16ChannelData![0] + for n in 0.. CMSampleBuffer? { + var blockBuffer: CMBlockBuffer? 
+ _ = data.withUnsafeBytes { (buffer: UnsafeRawBufferPointer) in + CMBlockBufferCreateWithMemoryBlock( + allocator: kCFAllocatorDefault, + memoryBlock: UnsafeMutableRawPointer(mutating: buffer.baseAddress), + blockLength: data.count, + blockAllocator: kCFAllocatorNull, + customBlockSource: nil, + offsetToData: 0, + dataLength: data.count, + flags: 0, + blockBufferOut: &blockBuffer + ) + } + guard let blockBuffer else { + return nil + } + var timing = CMSampleTimingInfo( + duration: .invalid, + presentationTimeStamp: .invalid, + decodeTimeStamp: .invalid + ) + var sampleBuffer: CMSampleBuffer? + let sampleStatus = CMSampleBufferCreateReady( + allocator: kCFAllocatorDefault, + dataBuffer: blockBuffer, + formatDescription: nil, + sampleCount: 1, + sampleTimingEntryCount: 1, + sampleTimingArray: &timing, + sampleSizeEntryCount: 1, + sampleSizeArray: [data.count], + sampleBufferOut: &sampleBuffer + ) + guard sampleStatus == noErr else { + return nil + } + return sampleBuffer + } + + static func makeSampleBuffer(width: Int, height: Int) -> CMSampleBuffer? { + var pixelBuffer: CVPixelBuffer? + CVPixelBufferCreate(nil, width, height, kCVPixelFormatType_32BGRA, nil, &pixelBuffer) + guard let pixelBuffer else { + return nil + } + var outputFormat: CMFormatDescription? + CMVideoFormatDescriptionCreateForImageBuffer( + allocator: kCFAllocatorDefault, + imageBuffer: pixelBuffer, + formatDescriptionOut: &outputFormat + ) + guard let outputFormat else { + return nil + } + var timingInfo = CMSampleTimingInfo() + var sampleBuffer: CMSampleBuffer? 
+ guard CMSampleBufferCreateReadyWithImageBuffer( + allocator: kCFAllocatorDefault, + imageBuffer: pixelBuffer, + formatDescription: outputFormat, + sampleTiming: &timingInfo, + sampleBufferOut: &sampleBuffer + ) == noErr else { + return nil + } + return sampleBuffer + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Codec/AudioCodecSettingsFormatTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Codec/AudioCodecSettingsFormatTests.swift new file mode 100644 index 000000000..58d4f40b6 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Codec/AudioCodecSettingsFormatTests.swift @@ -0,0 +1,20 @@ +import AVFoundation +import Foundation +import Testing + +@testable import HaishinKit + +@Suite struct AudioCodecSettingsFormatTests { + @Test func opus_sampleRate() { + #expect(AudioCodecSettings.Format.opus.makeSampleRate(49000, output: 0) == 48000.0) + #expect(AudioCodecSettings.Format.opus.makeSampleRate(44100, output: 0) == 48000.0) + #expect(AudioCodecSettings.Format.opus.makeSampleRate(20000, output: 0) == 16000.0) + #expect(AudioCodecSettings.Format.opus.makeSampleRate(1000, output: 0) == 8000.0) + } + + @Test func aac_sampleRate() { + #expect(AudioCodecSettings.Format.aac.makeSampleRate(48000, output: 44100) == 44100.0) + #expect(AudioCodecSettings.Format.aac.makeSampleRate(44100, output: 0) == 44100.0) + #expect(AudioCodecSettings.Format.aac.makeSampleRate(20000, output: 0) == 20000.0) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Codec/AudioCodecTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Codec/AudioCodecTests.swift new file mode 100644 index 000000000..42a1d3620 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Codec/AudioCodecTests.swift @@ -0,0 +1,115 @@ +import AVFoundation +import Foundation +import Testing + +@testable import HaishinKit + +@Suite struct AudioCodecTests { + @Test func aac_44100hz_step_1024() { + let encoder = HaishinKit.AudioCodec() + encoder.startRunning() + for _ in 0..<10 { + if 
let sampleBuffer = AVAudioPCMBufferFactory.makeSinWave(44100, numSamples: 1024) { + encoder.append(sampleBuffer, when: .init()) + } + } + #expect(encoder.outputFormat?.sampleRate == 44100) + } + + @Test func aac_48000hz_step_1024() { + let encoder = HaishinKit.AudioCodec() + encoder.startRunning() + for _ in 0..<10 { + if let sampleBuffer = AVAudioPCMBufferFactory.makeSinWave(48000.0, numSamples: 1024) { + encoder.append(sampleBuffer, when: .init()) + } + } + #expect(encoder.outputFormat?.sampleRate == 48000) + } + + @Test func aac_24000hz_step_1024() { + let encoder = HaishinKit.AudioCodec() + encoder.startRunning() + for _ in 0..<10 { + if let sampleBuffer = AVAudioPCMBufferFactory.makeSinWave(24000.0, numSamples: 1024) { + encoder.append(sampleBuffer, when: .init()) + } + } + #expect(encoder.outputFormat?.sampleRate == 24000) + } + + @Test func aac_16000hz_step_1024() { + let encoder = HaishinKit.AudioCodec() + encoder.startRunning() + for _ in 0..<10 { + if let sampleBuffer = AVAudioPCMBufferFactory.makeSinWave(16000.0, numSamples: 1024) { + encoder.append(sampleBuffer, when: .init()) + } + } + #expect(encoder.outputFormat?.sampleRate == 16000) + } + + @Test func aac_8000hz_step_256() { + let encoder = HaishinKit.AudioCodec() + encoder.startRunning() + for _ in 0..<10 { + if let sampleBuffer = AVAudioPCMBufferFactory.makeSinWave(8000.0, numSamples: 256) { + encoder.append(sampleBuffer, when: .init()) + } + } + #expect(encoder.outputFormat?.sampleRate == 8000) + } + + @Test func aac_8000hz_step_960() { + let encoder = HaishinKit.AudioCodec() + encoder.startRunning() + for _ in 0..<10 { + if let sampleBuffer = AVAudioPCMBufferFactory.makeSinWave(8000.0, numSamples: 960) { + encoder.append(sampleBuffer, when: .init()) + } + } + #expect(encoder.outputFormat?.sampleRate == 8000) + } + + @Test func aac_44100hz_step_1224() { + let encoder = HaishinKit.AudioCodec() + encoder.startRunning() + for _ in 0..<10 { + if let sampleBuffer = 
AVAudioPCMBufferFactory.makeSinWave(44100.0, numSamples: 1224) { + encoder.append(sampleBuffer, when: .init()) + } + } + } + + @Test func aac_1_channel_to_2_channel() { + let encoder = HaishinKit.AudioCodec() + encoder.settings = .init(downmix: false, channelMap: [0, 0]) + encoder.startRunning() + for _ in 0..<10 { + if let sampleBuffer = AVAudioPCMBufferFactory.makeSinWave(44100.0, numSamples: 1024) { + encoder.append(sampleBuffer, when: .init()) + } + } + #expect(encoder.outputFormat?.channelCount == 2) + } + + @Test func aac_44100_any_steps() { + let numSamples: [Int] = [1024, 1024, 1028, 1024, 1028, 1028, 962, 962, 960, 2237, 2236] + let encoder = HaishinKit.AudioCodec() + encoder.startRunning() + for numSample in numSamples { + if let sampleBuffer = AVAudioPCMBufferFactory.makeSinWave(44100.0, numSamples: numSample) { + encoder.append(sampleBuffer, when: .init()) + } + } + #expect(encoder.outputFormat?.sampleRate == 44100) + } + + @Test func test3Channel_withoutCrash() { + let encoder = HaishinKit.AudioCodec() + encoder.startRunning() + if let sampleBuffer = CMAudioSampleBufferFactory.makeSilence(44100, numSamples: 256, channels: 3) { + encoder.append(sampleBuffer) + } + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Extension/CMSampleBuffer+ExtensionTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Extension/CMSampleBuffer+ExtensionTests.swift new file mode 100644 index 000000000..2839f7d4a --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Extension/CMSampleBuffer+ExtensionTests.swift @@ -0,0 +1,21 @@ +import CoreMedia +import Foundation +import Testing + +@testable import HaishinKit + +@Suite struct CMSampleBufferExtensionTests { + @Test func isNotSync() { + if let video1 = CMVideoSampleBufferFactory.makeSampleBuffer(width: 100, height: 100) { + video1.sampleAttachments[0][.notSync] = 1 + } else { + Issue.record() + } + + if let video2 = CMVideoSampleBufferFactory.makeSampleBuffer(width: 100, height: 100) { + 
#expect(!video2.isNotSync) + } else { + Issue.record() + } + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Extension/ExpressibleByIntegerLiteral+ExtensionTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Extension/ExpressibleByIntegerLiteral+ExtensionTests.swift new file mode 100644 index 000000000..096772b27 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Extension/ExpressibleByIntegerLiteral+ExtensionTests.swift @@ -0,0 +1,30 @@ +import Foundation +import Testing + +@testable import HaishinKit + +@Suite struct ExpressibleByIntegerLiteralTests { + @Test func int32() { + #expect(Int32.min.bigEndian.data == Data([128, 0, 0, 0])) + #expect(Int32(32).bigEndian.data == Data([0, 0, 0, 32])) + #expect(Int32.max.bigEndian.data == Data([127, 255, 255, 255])) + } + + @Test func uint32() { + #expect(UInt32.min.bigEndian.data == Data([0, 0, 0, 0])) + #expect(UInt32(32).bigEndian.data == Data([0, 0, 0, 32])) + #expect(UInt32.max.bigEndian.data == Data([255, 255, 255, 255])) + } + + @Test func int64() { + #expect(Int64.min.bigEndian.data == Data([128, 0, 0, 0, 0, 0, 0, 0])) + #expect(Int64(32).bigEndian.data == Data([0, 0, 0, 0, 0, 0, 0, 32])) + #expect(Int64.max.bigEndian.data == Data([127, 255, 255, 255, 255, 255, 255, 255])) + } + + @Test func uint64() { + #expect(UInt64.min.bigEndian.data == Data([0, 0, 0, 0, 0, 0, 0, 0])) + #expect(UInt64(32).bigEndian.data == Data([0, 0, 0, 0, 0, 0, 0, 32])) + #expect(UInt64.max.bigEndian.data == Data([255, 255, 255, 255, 255, 255, 255, 255])) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Extension/SwiftCore+ExtensionTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Extension/SwiftCore+ExtensionTests.swift new file mode 100644 index 000000000..c1ef797bf --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Extension/SwiftCore+ExtensionTests.swift @@ -0,0 +1,11 @@ +import Foundation +import Testing + +@testable import HaishinKit + +@Suite struct SwiftCoreExtensionTests { + @Test 
func int32() { + #expect(Int32.min == Int32(data: Int32.min.data)) + #expect(Int32.max == Int32(data: Int32.max.data)) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/ISO/ADTSHeaderTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/ISO/ADTSHeaderTests.swift new file mode 100644 index 000000000..5554603a4 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/ISO/ADTSHeaderTests.swift @@ -0,0 +1,11 @@ +import Foundation +import Testing + +@testable import HaishinKit + +@Suite struct ADTSHeaderTests { + @Test func bytes() { + let data = Data([255, 241, 77, 128, 112, 127, 252, 1]) + _ = ADTSHeader(data: data) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/ISO/AudioSpecificConfigTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/ISO/AudioSpecificConfigTests.swift new file mode 100644 index 000000000..9f87f290c --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/ISO/AudioSpecificConfigTests.swift @@ -0,0 +1,14 @@ +import Foundation +import Testing + +@testable import HaishinKit + +@Suite struct AudioSpecificConfigTests { + @Test func bytes() { + #expect(AudioSpecificConfig(type: .aacMain, frequency: .hz48000, channel: .frontCenter).bytes == [0b00001001, 0b10001000]) + #expect(AudioSpecificConfig(type: .aacMain, frequency: .hz44100, channel: .frontCenter).bytes == [0b00001010, 0b00001000]) + #expect(AudioSpecificConfig(type: .aacMain, frequency: .hz24000, channel: .frontCenter).bytes == [0b00001011, 0b00001000]) + #expect(AudioSpecificConfig(type: .aacMain, frequency: .hz16000, channel: .frontCenter).bytes == [0b00001100, 0b00001000]) + #expect(AudioSpecificConfig(type: .aacMain, frequency: .hz8000, channel: .frontCenter).bytes == [0b00001101, 0b10001000]) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/ISO/ISOTypeBufferUtilTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/ISO/ISOTypeBufferUtilTests.swift new file mode 100644 index 000000000..f835d6e3d --- /dev/null +++ 
b/Vendor/HaishinKit.swift/HaishinKit/Tests/ISO/ISOTypeBufferUtilTests.swift @@ -0,0 +1,36 @@ +import Foundation +import Testing + +@testable import HaishinKit + +@Suite struct ISOTypeBufferUtilTests { + @Test func toNALFileFormat_4() { + var data = Data([0, 0, 0, 1, 10, 10, 0, 0, 0, 1, 3, 3, 2, 0, 0, 0, 1, 5, 5, 5]) + ISOTypeBufferUtil.toNALFileFormat(&data) + #expect(data.bytes == Data([0, 0, 0, 2, 10, 10, 0, 0, 0, 3, 3, 3, 2, 0, 0, 0, 3, 5, 5, 5]).bytes) + } + + @Test func toNALFileFormat_3() { + var data = Data([0, 0, 1, 10, 10, 0, 0, 1, 3, 3, 2, 0, 0, 1, 5, 5, 5]) + ISOTypeBufferUtil.toNALFileFormat(&data) + #expect(data.bytes == Data([0, 0, 2, 10, 10, 0, 0, 3, 3, 3, 2, 0, 0, 3, 5, 5, 5]).bytes) + } + + @Test func toNALFileFormat() { + let expected = Data([0, 0, 1, 17, 33, 248, 224, 9, 224, 183, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 
103, 250, 168, 44, 255, 85, 5, 159]) + var data = expected + data[0] = 0 + data[1] = 0 + data[2] = 0 + data[3] = 1 + ISOTypeBufferUtil.toNALFileFormat(&data) + #expect(data.bytes == expected.bytes) + } + + @Test func toNALFileFormat_3video() { + var data = Data([0, 0, 1, 33, 254, 120, 9, 224, 183, 253, 84, 22, 127, 170, 130, 207, 245, 80, 70, 125, 76, 125, 95, 250, 168, 44, 255, 85, 5, 159, 234, 160, 160, 250, 147, 253, 84, 22, 127, 170, 130, 195, 235, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 8, 143, 168, 175, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 34, 62, 162, 191, 213, 65, 17, 248, 175, 245, 80, 153, 248, 103, 253, 84, 17, 31, 81, 95, 234, 160, 179, 253, 84, 16, 31, 148, 250, 159, 253, 84, 16, 31, 140, 255, 85, 4, 71, 226, 191, 213, 65, 89, 255, 253, 84, 16, 31, 140, 255, 85, 5, 159, 234, 160, 179, 253, 84, 50, 125, 103, 225, 47, 245, 80, 89, 254, 170, 29, 63, 31, 254, 170, 11, 63, 213, 65, 17, 245, 21, 254, 170, 27, 63, 16, 125, 68, 64, 201, 255, 213, 65, 81, 245, 95, 234, 161, 243, 234, 52, 87, 245, 80, 225, 245, 8, 127, 170, 130, 207, 245, 80, 86, 127, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 67, 199, 212, 199, 226, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 21, 31, 175, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 39, 213, 255, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 224]) + + ISOTypeBufferUtil.toNALFileFormat(&data) + #expect(data.bytes == Data([0, 1, 73, 33, 254, 120, 9, 224, 183, 253, 84, 22, 127, 170, 130, 207, 245, 80, 70, 125, 76, 125, 95, 250, 168, 44, 255, 85, 5, 159, 234, 160, 160, 
250, 147, 253, 84, 22, 127, 170, 130, 195, 235, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 8, 143, 168, 175, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 34, 62, 162, 191, 213, 65, 17, 248, 175, 245, 80, 153, 248, 103, 253, 84, 17, 31, 81, 95, 234, 160, 179, 253, 84, 16, 31, 148, 250, 159, 253, 84, 16, 31, 140, 255, 85, 4, 71, 226, 191, 213, 65, 89, 255, 253, 84, 16, 31, 140, 255, 85, 5, 159, 234, 160, 179, 253, 84, 50, 125, 103, 225, 47, 245, 80, 89, 254, 170, 29, 63, 31, 254, 170, 11, 63, 213, 65, 17, 245, 21, 254, 170, 27, 63, 16, 125, 68, 64, 201, 255, 213, 65, 81, 245, 95, 234, 161, 243, 234, 52, 87, 245, 80, 225, 245, 8, 127, 170, 130, 207, 245, 80, 86, 127, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 67, 199, 212, 199, 226, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 21, 31, 175, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 39, 213, 255, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 253, 84, 22, 127, 170, 130, 207, 245, 80, 89, 254, 170, 11, 63, 213, 65, 103, 250, 168, 44, 255, 85, 5, 159, 234, 160, 179, 224]).bytes) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/ISO/NALUnitReaderTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/ISO/NALUnitReaderTests.swift new file mode 100644 index 000000000..810fa305d --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/ISO/NALUnitReaderTests.swift @@ -0,0 +1,29 @@ +import Foundation +import Testing + +@testable import HaishinKit + +@Suite struct NALUnitReaderTests { + @Test func `h264 read slice`() { + let data = Data([0, 0, 0, 112, 33, 251, 108, 120, 54, 255, 85, 6, 191, 234, 160, 215, 253, 84, 26, 255, 170, 131, 95, 245, 80, 107, 254, 170, 13, 127, 213, 65, 175, 
250, 168, 53, 255, 85, 6, 191, 234, 160, 215, 253, 84, 26, 255, 170, 131, 83, 239, 234, 160, 215, 253, 84, 26, 255, 170, 131, 95, 245, 80, 106, 127, 234, 160, 215, 253, 84, 26, 159, 250, 168, 53, 255, 85, 6, 71, 236, 251, 250, 168, 53, 255, 85, 6, 98, 58, 63, 245, 80, 107, 254, 170, 13, 127, 213, 65, 175, 250, 168, 53, 255, 85, 6, 191, 234, 160, 204, 253, 127, 170, 131, 95, 128]) + let buffer = CMVideoSampleBufferFactory.makeSampleBuffer(data) + let reader = NALUnitReader() + if let buffer { + let nals = reader.read(buffer) + #expect(nals.count == 1) + #expect(nals[0][0] == 33) + } + } + + @Test func `h264 read idr`() { + let data = Data([0, 0, 0, 30, 6, 5, 26, 71, 86, 74, 220, 92, 76, 67, 63, 148, 239, 197, 17, 60, 209, 67, 168, 1, 255, 204, 204, 255, 2, 0, 4, 0, 0, 128, 0, 0, 4, 202, 37, 184, 32, 0, 147, 255, 255, 225, 232, 160, 251, 221, 247, 223, 125, 247, 223, 125, 247, 223, 125, 247, 223, 125, 247, 223, 125, 247, 223, 125, 247, 223, 125, 247, 223, 125, 247, 223, 125, 247, 223, 125, 247, 223, 125, 247, 223, 125, 247, 223, 125, 110, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 
186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 
235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 
174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 188]) + + let buffer = CMVideoSampleBufferFactory.makeSampleBuffer(data) + buffer?.isNotSync = false + let reader = NALUnitReader() + if let buffer { + let result = reader.read(buffer) + #expect(result.count == 2) + } + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioDeviceUnitTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioDeviceUnitTests.swift new file mode 100644 index 000000000..64f81c2de --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioDeviceUnitTests.swift @@ -0,0 +1,16 @@ +import AVFoundation +import Foundation +import Testing + +@testable import HaishinKit + +@Suite struct AudioDeviceUnitTests { + @Test func release() { + weak var weakDevice: AudioDeviceUnit? + _ = { + let device = try! AudioDeviceUnit(0, device: AVCaptureDevice.default(for: .audio)!) 
+ weakDevice = device + }() + #expect(weakDevice == nil) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioMixerByMultiTrackTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioMixerByMultiTrackTests.swift new file mode 100644 index 000000000..f23007ba2 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioMixerByMultiTrackTests.swift @@ -0,0 +1,84 @@ +import AVFoundation +import Foundation +import Testing + +@testable import HaishinKit + +@Suite struct AudioMixerByMultiTrackTests { + final class Result: AudioMixerDelegate { + var outputs: [AVAudioPCMBuffer] = [] + var error: AudioMixerError? + + func audioMixer(_ audioMixer: some AudioMixer, track: UInt8, didInput buffer: AVAudioPCMBuffer, when: AVAudioTime) { + } + + func audioMixer(_ audioMixer: some AudioMixer, didOutput audioFormat: AVAudioFormat) { + } + + func audioMixer(_ audioMixer: some AudioMixer, didOutput audioBuffer: AVAudioPCMBuffer, when: AVAudioTime) { + outputs.append(audioBuffer) + } + + func audioMixer(_ audioMixer: some AudioMixer, errorOccurred error: AudioMixerError) { + self.error = error + } + } + + @Test func keep44100() { + let result = Result() + let mixer = AudioMixerByMultiTrack() + mixer.delegate = result + mixer.settings = .init( + sampleRate: 44100, channels: 1 + ) + mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!) + mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!) + #expect(mixer.outputFormat?.sampleRate == 44100) + mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!) 
+ #expect(mixer.outputFormat?.sampleRate == 44100) + #expect(result.outputs.count == 2) + } + + @Test func test44100to48000() { + let mixer = AudioMixerByMultiTrack() + mixer.settings = .init( + sampleRate: 44100, channels: 1 + ) + mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!) + #expect(mixer.outputFormat?.sampleRate == 44100) + mixer.settings = .init( + sampleRate: 48000, channels: 1 + ) + mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!) + #expect(mixer.outputFormat?.sampleRate == 48000) + } + + @Test func test48000_2ch() { + let result = Result() + let mixer = AudioMixerByMultiTrack() + mixer.delegate = result + mixer.settings = .init( + sampleRate: 48000, channels: 2 + ) + mixer.append(1, buffer: CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 2)!) + mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 2)!) + #expect(mixer.outputFormat?.channelCount == 2) + #expect(mixer.outputFormat?.sampleRate == 48000) + mixer.append(1, buffer: CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 2)!) + mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 2)!) + // #expect(result.outputs.count == 2) + // #expect(result.error == nil) + } + + @Test func inputFormats() { + let mixer = AudioMixerByMultiTrack() + mixer.settings = .init( + sampleRate: 44100, channels: 1 + ) + mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!) + mixer.append(1, buffer: CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!) 
+ let inputFormats = mixer.inputFormats + #expect(inputFormats[0]?.sampleRate == 48000) + #expect(inputFormats[1]?.sampleRate == 44100) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioMixerBySingleTrackTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioMixerBySingleTrackTests.swift new file mode 100644 index 000000000..552eb62b2 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioMixerBySingleTrackTests.swift @@ -0,0 +1,112 @@ +import AVFoundation +import Foundation +import Testing + +@testable import HaishinKit + +@Suite struct AudioMixerBySingleTrackTests { + final class Result: AudioMixerDelegate { + var outputs: [AVAudioPCMBuffer] = [] + + func audioMixer(_ audioMixer: some AudioMixer, track: UInt8, didInput buffer: AVAudioPCMBuffer, when: AVAudioTime) { + } + + func audioMixer(_ audioMixer: some AudioMixer, didOutput audioFormat: AVAudioFormat) { + } + + func audioMixer(_ audioMixer: some AudioMixer, didOutput audioBuffer: AVAudioPCMBuffer, when: AVAudioTime) { + outputs.append(audioBuffer) + } + + func audioMixer(_ audioMixer: some AudioMixer, errorOccurred error: AudioMixerError) { + } + } + + @Test func keep44100_1ch() { + let mixer = AudioMixerBySingleTrack() + mixer.settings = .init( + sampleRate: 44100, channels: 1 + ) + mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!) + #expect(mixer.outputFormat?.sampleRate == 44100) + mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!) + #expect(mixer.outputFormat?.sampleRate == 44100) + } + + @Test func test44100to48000_1ch() { + let mixer = AudioMixerBySingleTrack() + mixer.settings = .init( + sampleRate: 44100, channels: 1 + ) + mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!) 
+ #expect(mixer.outputFormat?.sampleRate == 44100) + mixer.settings = .init( + sampleRate: 48000, channels: 1 + ) + mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!) + #expect(mixer.outputFormat?.sampleRate == 48000) + } + + @Test func test44100to48000_4ch_2ch() { + let result = Result() + let mixer = AudioMixerBySingleTrack() + mixer.delegate = result + mixer.settings = .init( + sampleRate: 44100, channels: 0 + ) + mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 4)!) + #expect(mixer.outputFormat?.channelCount == 2) + #expect(mixer.outputFormat?.sampleRate == 44100) + mixer.settings = .init( + sampleRate: 48000, channels: 0 + ) + mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 4)!) + mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 4)!) + #expect(mixer.outputFormat?.channelCount == 2) + #expect(mixer.outputFormat?.sampleRate == 48000) + #expect(result.outputs.count == 2) + } + + @Test func test44100to48000_4ch() { + let result = Result() + let mixer = AudioMixerBySingleTrack() + mixer.delegate = result + mixer.settings = .init( + sampleRate: 44100, channels: 0 + ) + mixer.settings.maximumNumberOfChannels = 4 + mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 4)!) + #expect(mixer.outputFormat?.channelCount == 4) + #expect(mixer.outputFormat?.sampleRate == 44100) + mixer.settings = .init( + sampleRate: 48000, channels: 0 + ) + mixer.settings.maximumNumberOfChannels = 4 + mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 4)!) + mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 4)!) 
+ #expect(mixer.outputFormat?.channelCount == 4) + #expect(mixer.outputFormat?.sampleRate == 48000) + #expect(result.outputs.count == 2) + } + + @Test func passthrough16000_48000() { + let mixer = AudioMixerBySingleTrack() + mixer.settings = .init( + sampleRate: 0, channels: 1 + ) + mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(16000, numSamples: 1024, channels: 1)!) + #expect(mixer.outputFormat?.sampleRate == 16000) + mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!) + #expect(mixer.outputFormat?.sampleRate == 44100) + } + + @Test func inputFormats() { + let mixer = AudioMixerBySingleTrack() + mixer.settings = .init( + sampleRate: 44100, channels: 1 + ) + mixer.append(0, buffer: CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!) + let inputFormats = mixer.inputFormats + #expect(inputFormats[0]?.sampleRate == 48000) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioMixerTrackTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioMixerTrackTests.swift new file mode 100644 index 000000000..12f28c7ad --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioMixerTrackTests.swift @@ -0,0 +1,64 @@ +import AVFoundation +import Foundation +import Testing + +@testable import HaishinKit + +@Suite final class AudioMixerTrackTests { + @Test func keep16000() { + let format = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: 16000, channels: 1, interleaved: true)! + let track = AudioMixerTrack(id: 0, outputFormat: format) + track.delegate = self + track.append(CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!) + #expect(track.outputFormat.sampleRate == 16000) + track.append(CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!) 
+ #expect(track.outputFormat.sampleRate == 16000) + } + + @Test func keep44100() { + let format = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: 44100, channels: 1, interleaved: true)! + let resampler = AudioMixerTrack(id: 0, outputFormat: format) + resampler.delegate = self + resampler.append(CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!) + #expect(resampler.outputFormat.sampleRate == 44100) + resampler.append(CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!) + #expect(resampler.outputFormat.sampleRate == 44100) + resampler.append(CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!) + #expect(resampler.outputFormat.sampleRate == 44100) + resampler.append(CMAudioSampleBufferFactory.makeSinWave(16000, numSamples: 1024 * 20, channels: 1)!) + #expect(resampler.outputFormat.sampleRate == 44100) + } + + @Test func keep48000() { + let format = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: 48000, channels: 1, interleaved: true)! + let track = AudioMixerTrack(id: 0, outputFormat: format) + track.delegate = self + track.append(CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!) + track.append(CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024 * 2, channels: 1)!) + } + + @Test func passthrough48000_44100() { + let format = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: 44000, channels: 1, interleaved: true)! + let resampler = AudioMixerTrack(id: 0, outputFormat: format) + resampler.delegate = self + resampler.append(CMAudioSampleBufferFactory.makeSinWave(44000, numSamples: 1024, channels: 1)!) + resampler.append(CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!) + } + + @Test func passthrough16000_48000() { + let format = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: 48000, channels: 1, interleaved: true)! 
+ let track = AudioMixerTrack(id: 0, outputFormat: format) + track.delegate = self + track.append(CMAudioSampleBufferFactory.makeSinWave(16000, numSamples: 1024, channels: 1)!) + #expect(track.outputFormat.sampleRate == 48000) + track.append(CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!) + } +} + +extension AudioMixerTrackTests: AudioMixerTrackDelegate { + func track(_ track: HaishinKit.AudioMixerTrack, didOutput audioPCMBuffer: AVAudioPCMBuffer, when: AVAudioTime) { + } + + func track(_ track: HaishinKit.AudioMixerTrack, errorOccurred error: HaishinKit.AudioMixerError) { + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioRingBufferTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioRingBufferTests.swift new file mode 100644 index 000000000..245b2e6d2 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/AudioRingBufferTests.swift @@ -0,0 +1,85 @@ +import AVFoundation +import Foundation +import Testing + +@testable import HaishinKit + +@Suite struct AudioRingBufferTests { + @Test func monoAppendSampleBuffer_920() throws { + try appendSampleBuffer(920, channels: 1) + } + + @Test func monoAppendSampleBuffer_1024() throws { + try appendSampleBuffer(1024, channels: 1) + } + + @Test func monoAppendSampleBuffer_overrun() throws { + let numSamples = 1024 * 4 + var asbd = AudioStreamBasicDescription( + mSampleRate: 44100, + mFormatID: kAudioFormatLinearPCM, + mFormatFlags: 0xc, + mBytesPerPacket: 2, + mFramesPerPacket: 1, + mBytesPerFrame: 2, + mChannelsPerFrame: 1, + mBitsPerChannel: 16, + mReserved: 0 + ) + let format = AVAudioFormat(streamDescription: &asbd) + let buffer = AudioRingBuffer(format!, bufferCounts: 3) // 1024 * 3 + guard + let readBuffer = AVAudioPCMBuffer(pcmFormat: AVAudioFormat(streamDescription: &asbd)!, frameCapacity: AVAudioFrameCount(1024)), + let sinWave = CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: numSamples, channels: 1) else { + return + } + 
buffer?.append(sinWave) + #expect(buffer?.isDataAvailable(1024) == true) + #expect(buffer?.render(UInt32(1024), ioData: readBuffer.mutableAudioBufferList) == noErr) + #expect(buffer?.isDataAvailable(1024) == true) + #expect(buffer?.render(UInt32(1024), ioData: readBuffer.mutableAudioBufferList) == noErr) + #expect(buffer?.isDataAvailable(1024) == true) + #expect(buffer?.render(UInt32(1024), ioData: readBuffer.mutableAudioBufferList) == noErr) + #expect(buffer?.isDataAvailable(1024) == true) + #expect(buffer?.render(UInt32(1024), ioData: readBuffer.mutableAudioBufferList) == noErr) + #expect(buffer?.isDataAvailable(1024) == false) + #expect(buffer?.render(UInt32(1024), ioData: readBuffer.mutableAudioBufferList) != noErr) + } + + @Test func stereoAppendSampleBuffer_920() throws { + try appendSampleBuffer(920, channels: 2) + } + + @Test func stereoAppendSampleBuffer_1024() throws { + try appendSampleBuffer(1024, channels: 2) + } + + private func appendSampleBuffer(_ numSamples: Int, channels: UInt32) throws { + var asbd = AudioStreamBasicDescription( + mSampleRate: 44100, + mFormatID: kAudioFormatLinearPCM, + mFormatFlags: 0xc, + mBytesPerPacket: 2 * channels, + mFramesPerPacket: 1, + mBytesPerFrame: 2 * channels, + mChannelsPerFrame: channels, + mBitsPerChannel: 16, + mReserved: 0 + ) + let format = AVAudioFormat(streamDescription: &asbd) + let buffer = AudioRingBuffer(format!, bufferCounts: 3) + guard + let readBuffer = AVAudioPCMBuffer(pcmFormat: AVAudioFormat(streamDescription: &asbd)!, frameCapacity: AVAudioFrameCount(numSamples)), + let sinWave = CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: numSamples, channels: channels) else { + return + } + let bufferList = UnsafeMutableAudioBufferListPointer(readBuffer.mutableAudioBufferList) + readBuffer.frameLength = AVAudioFrameCount(numSamples) + for _ in 0..<30 { + buffer?.append(sinWave) + readBuffer.int16ChannelData?[0].update(repeating: 0, count: numSamples) + #expect(buffer?.render(UInt32(numSamples), 
ioData: readBuffer.mutableAudioBufferList) == noErr) + #expect(try sinWave.dataBuffer?.dataBytes().bytes == Data(bytes: bufferList[0].mData!, count: numSamples * Int(channels) * 2).bytes) + } + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/MediaMixerTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/MediaMixerTests.swift new file mode 100644 index 000000000..b7ba11f2c --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/MediaMixerTests.swift @@ -0,0 +1,53 @@ +import AVFoundation +import Foundation +import Testing + +@testable import HaishinKit + +@Suite(.disabled(if: TestEnvironment.isCI)) +struct MediaMixerTests { + @Test func videoConfiguration() async throws { + let mixer = MediaMixer() + await #expect(throws: (MediaMixer.Error).self) { + try await mixer.configuration(video: 0) { _ in } + } + try await mixer.attachVideo(AVCaptureDevice.default(for: .video), track: 0) { unit in + #expect(throws: (any Error).self) { + try unit.setFrameRate(60) + } + } + try await mixer.configuration(video: 0) { _ in } + } + + @Test func release() async { + weak var weakMixer: MediaMixer? + _ = await { + let mixer = MediaMixer(captureSessionMode: .manual) + await mixer.startRunning() + try? await Task.sleep(nanoseconds: 1) + await mixer.stopRunning() + try? await Task.sleep(nanoseconds: 1) + weakMixer = mixer + }() + #expect(weakMixer == nil) + } + + @Test func release_with_multimode() async { + weak var weakMixer: MediaMixer? + _ = await { + let mixer = MediaMixer(captureSessionMode: .multi) + await mixer.startRunning() + try? await Task.sleep(nanoseconds: 1) + await mixer.stopRunning() + try? 
await Task.sleep(nanoseconds: 1) + weakMixer = mixer + }() + #expect(weakMixer == nil) + } + + @Test func currentFrameRate() async throws { + let mixer = MediaMixer() + try await mixer.setFrameRate(60) + #expect(await mixer.frameRate == 60) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/VideoDeviceUnitTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/VideoDeviceUnitTests.swift new file mode 100644 index 000000000..7914f4194 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Mixer/VideoDeviceUnitTests.swift @@ -0,0 +1,19 @@ +import AVFoundation +import Foundation +import Testing + +@testable import HaishinKit + +@Suite struct VideoDeviceUnitTests { + @Test func release() { + weak var weakDevice: VideoDeviceUnit? + _ = { + guard let videoDevice = AVCaptureDevice.default(for: .video) else { + return + } + let device = try? VideoDeviceUnit(0, device: videoDevice) + weakDevice = device + }() + #expect(weakDevice == nil) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Screen/ScreenObjectContainerTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Screen/ScreenObjectContainerTests.swift new file mode 100644 index 000000000..3f08df970 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Screen/ScreenObjectContainerTests.swift @@ -0,0 +1,29 @@ +import AVFoundation +import Foundation +import Testing + +@testable import HaishinKit + +@ScreenActor +@Suite struct ScreenObjectContainerTests { + @Test func lookUpVideoTrackScreenObject() { + let container1 = ScreenObjectContainer() + + let videoTrack1 = VideoTrackScreenObject() + let videoTrack2 = VideoTrackScreenObject() + + try? container1.addChild(videoTrack1) + try? container1.addChild(videoTrack2) + + let videoTracks1 = container1.getScreenObjects() as [VideoTrackScreenObject] + #expect(videoTracks1.count == 2) + + let container2 = ScreenObjectContainer() + let videoTrack3 = VideoTrackScreenObject() + try? container2.addChild(videoTrack3) + try? 
container1.addChild(container2) + + let videoTracks2 = container1.getScreenObjects() as [VideoTrackScreenObject] + #expect(videoTracks2.count == 3) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Screen/ScreenObjectTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Screen/ScreenObjectTests.swift new file mode 100644 index 000000000..b9aa7ee0a --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Screen/ScreenObjectTests.swift @@ -0,0 +1,96 @@ +import AVFoundation +import Foundation +import Testing + +@testable import HaishinKit + +@ScreenActor +@Suite struct ScreenObjectTests { + @Test func screenHorizontalAlignmentRect() { + let screen = Screen() + + let object1 = ScreenObject() + object1.size = .init(width: 100, height: 100) + object1.horizontalAlignment = .left + + let object2 = ScreenObject() + object2.size = .init(width: 100, height: 100) + object2.horizontalAlignment = .center + + let object3 = ScreenObject() + object3.size = .init(width: 100, height: 100) + object3.horizontalAlignment = .right + + try? screen.addChild(object1) + try? screen.addChild(object2) + try? 
screen.addChild(object3) + + if let sampleBuffer = CMVideoSampleBufferFactory.makeSampleBuffer(width: 1600, height: 900) { + _ = screen.render(sampleBuffer) + } + #expect(object1.bounds == .init(origin: .zero, size: object1.size)) + #expect(object2.bounds == .init(x: 750, y: 0, width: 100, height: 100)) + #expect(object3.bounds == .init(x: 1500, y: 0, width: 100, height: 100)) + } + + @Test func screenVerticalAlignmentRect() { + let screen = Screen() + + let object0 = ScreenObject() + object0.size = .zero + object0.verticalAlignment = .top + + let object1 = ScreenObject() + object1.size = .init(width: 100, height: 100) + object1.verticalAlignment = .top + + let object2 = ScreenObject() + object2.size = .init(width: 100, height: 100) + object2.verticalAlignment = .middle + + let object3 = ScreenObject() + object3.size = .init(width: 100, height: 100) + object3.verticalAlignment = .bottom + + try? screen.addChild(object0) + try? screen.addChild(object1) + try? screen.addChild(object2) + try? 
screen.addChild(object3) + + if let sampleBuffer = CMVideoSampleBufferFactory.makeSampleBuffer(width: 1600, height: 900) { + _ = screen.render(sampleBuffer) + } + #expect(object0.bounds == .init(x: 0, y: 0, width: 1600, height: 900)) + #expect(object1.bounds == .init(x: 0, y: 0, width: object1.size.width, height: object1.size.height)) + #expect(object2.bounds == .init(x: 0, y: 400, width: 100, height: 100)) + #expect(object3.bounds == .init(x: 0, y: 800, width: 100, height: 100)) + } + + @Test func screenWithContainerTests() { + let screen = Screen() + + let container = ScreenObjectContainer() + container.size = .init(width: 200, height: 100) + container.layoutMargin = .init(top: 16, left: 16, bottom: 0, right: 0) + + let object0 = ScreenObject() + object0.size = .zero + object0.verticalAlignment = .top + + let object1 = ScreenObject() + object1.size = .init(width: 100, height: 100) + object1.layoutMargin = .init(top: 16, left: 16, bottom: 0, right: 0) + object1.verticalAlignment = .top + + try? container.addChild(object0) + try? container.addChild(object1) + try? 
screen.addChild(container) + + if let sampleBuffer = CMVideoSampleBufferFactory.makeSampleBuffer(width: 1600, height: 900) { + _ = screen.render(sampleBuffer) + } + + #expect(object0.bounds == .init(x: 16, y: 16, width: 200, height: 100)) + #expect(object1.bounds == .init(x: 32, y: 32, width: 100, height: 100)) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Screen/VideoTrackScreenObjectTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Screen/VideoTrackScreenObjectTests.swift new file mode 100644 index 000000000..8460fa056 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Screen/VideoTrackScreenObjectTests.swift @@ -0,0 +1,42 @@ +import AVFoundation +import Foundation +import Testing + +@testable import HaishinKit + +@ScreenActor +@Suite struct VideoTrackObjectContainerTests { + @Test func horizontalAlignmentBounds() { + let screen = Screen() + + let object1 = VideoTrackScreenObject() + object1.videoGravity = .resizeAspect + object1.size = .init(width: 160, height: 90) + object1.enqueue(CMVideoSampleBufferFactory.makeSampleBuffer(width: 900, height: 1600)!) + object1.horizontalAlignment = .left + + let object2 = VideoTrackScreenObject() + object2.videoGravity = .resizeAspect + object2.size = .init(width: 160, height: 90) + object2.enqueue(CMVideoSampleBufferFactory.makeSampleBuffer(width: 900, height: 1600)!) + object2.horizontalAlignment = .center + + let object3 = VideoTrackScreenObject() + object3.videoGravity = .resizeAspect + object3.size = .init(width: 160, height: 90) + object3.enqueue(CMVideoSampleBufferFactory.makeSampleBuffer(width: 900, height: 1600)!) + object3.horizontalAlignment = .right + + try? screen.addChild(object1) + try? screen.addChild(object2) + try? 
screen.addChild(object3) + + if let sampleBuffer = CMVideoSampleBufferFactory.makeSampleBuffer(width: 1600, height: 900) { + _ = screen.render(sampleBuffer) + } + + #expect(object1.bounds == .init(x: 0, y: 0, width: 50.625, height: 90)) + #expect(object2.bounds == .init(x: 774.6875, y: 0, width: 50.625, height: 90)) + #expect(object3.bounds == .init(x: 1549.375, y: 0, width: 50.625, height: 90)) + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Stream/StreamRecorderTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Stream/StreamRecorderTests.swift new file mode 100644 index 000000000..f1054e97a --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Stream/StreamRecorderTests.swift @@ -0,0 +1,49 @@ +import Foundation +import Testing + +@testable import HaishinKit + +@Suite struct StreamRecorderTests { + @Test func startRunning_nil() async throws { + let recorder = StreamRecorder() + try await recorder.startRecording(nil) + let moviesDirectory = await recorder.moviesDirectory + // $moviesDirectory/B644F60F-0959-4F54-9D14-7F9949E02AD8.mp4 + #expect(((await recorder.outputURL?.path.contains(moviesDirectory.path())) != nil)) + } + + @Test func startRunning_fileName() async throws { + let recorder = StreamRecorder() + try? await recorder.startRecording(URL(string: "dir/sample.mp4")) + _ = await recorder.moviesDirectory + // $moviesDirectory/dir/sample.mp4 + #expect(((await recorder.outputURL?.path.contains("dir/sample.mp4")) != nil)) + } + + @Test func startRunning_fullPath() async { + let recorder = StreamRecorder() + let fullPath = await recorder.moviesDirectory.appendingPathComponent("sample.mp4") + // $moviesDirectory/sample.mp4 + try? await recorder.startRecording(fullPath) + #expect(await recorder.outputURL == fullPath) + } + + @Test func startRunning_dir() async { + let recorder = StreamRecorder() + try? 
await recorder.startRecording(URL(string: "dir")) + // $moviesDirectory/dir/33FA7D32-E0A8-4E2C-9980-B54B60654044.mp4 + #expect(((await recorder.outputURL?.path.contains("dir")) != nil)) + } + + @Test func startRunning_fileAlreadyExists() async { + let recorder = StreamRecorder() + let filePath = await recorder.moviesDirectory.appendingPathComponent("duplicate-file.mp4") + FileManager.default.createFile(atPath: filePath.path, contents: nil) + do { + try await recorder.startRecording(filePath) + fatalError() + } catch { + try? FileManager.default.removeItem(atPath: filePath.path) + } + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/TestEnvironment.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/TestEnvironment.swift new file mode 100644 index 000000000..413c05d93 --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/TestEnvironment.swift @@ -0,0 +1,7 @@ +import Foundation + +enum TestEnvironment { + static var isCI: Bool { + ProcessInfo.processInfo.environment["CI"] == "true" + } +} diff --git a/Vendor/HaishinKit.swift/HaishinKit/Tests/Util/ByteArrayTests.swift b/Vendor/HaishinKit.swift/HaishinKit/Tests/Util/ByteArrayTests.swift new file mode 100644 index 000000000..2c02973fe --- /dev/null +++ b/Vendor/HaishinKit.swift/HaishinKit/Tests/Util/ByteArrayTests.swift @@ -0,0 +1,125 @@ +import Foundation +import Testing + +@testable import HaishinKit + +@Suite struct ByteArrayTests { + @Test func int8() throws { + let bytes = ByteArray() + bytes.writeInt8(Int8.min) + bytes.writeInt8(0) + bytes.writeInt8(Int8.max) + #expect(bytes.position == ByteArray.sizeOfInt8 * 3) + bytes.position = 0 + #expect(try bytes.readInt8() == Int8.min) + #expect(try bytes.readInt8() == 0 ) + #expect(try bytes.readInt8() == Int8.max) + } + + @Test func uint8() throws { + let bytes = ByteArray() + bytes.writeUInt8(UInt8.min) + bytes.writeUInt8(0) + bytes.writeUInt8(UInt8.max) + #expect(bytes.position == ByteArray.sizeOfInt8 * 3) + bytes.position = 0 + #expect(try 
bytes.readUInt8() == UInt8.min) + #expect(try bytes.readUInt8() == 0) + #expect(try bytes.readUInt8() == UInt8.max) + } + + @Test func int16() throws { + let bytes = ByteArray() + bytes.writeInt16(Int16.min) + bytes.writeInt16(0) + bytes.writeInt16(Int16.max) + print(bytes) + bytes.position = 0 + #expect(try bytes.readInt16() == Int16.min) + #expect(try bytes.readInt16() == 0) + #expect(try bytes.readInt16() == Int16.max) + } + + @Test func uint16() throws { + let bytes = ByteArray() + bytes.writeUInt16(UInt16.min) + bytes.writeUInt16(0) + bytes.writeUInt16(UInt16.max) + bytes.position = 0 + #expect(try bytes.readUInt16() == UInt16.min) + #expect(try bytes.readUInt16() == 0) + #expect(try bytes.readUInt16() == UInt16.max) + } + + @Test func uint24() throws { + let bytes = ByteArray() + bytes.writeUInt24(0xFFFFFF) + bytes.position = 0 + #expect(try bytes.readUInt24() == 0xFFFFFF) + } + + @Test func uint32() throws { + let bytes = ByteArray() + bytes.writeUInt32(UInt32.min) + bytes.writeUInt32(0) + bytes.writeUInt32(UInt32.max) + bytes.position = 0 + #expect(try bytes.readUInt32() == UInt32.min) + #expect(try bytes.readUInt32() == 0) + #expect(try bytes.readUInt32() == UInt32.max) + } + + @Test func int32() throws { + let bytes = ByteArray() + bytes.writeInt32(Int32.min) + bytes.writeInt32(0) + bytes.writeInt32(Int32.max) + bytes.position = 0 + #expect(try bytes.readInt32() == Int32.min) + #expect(try bytes.readInt32() == 0) + #expect(try bytes.readInt32() == Int32.max) + } + + @Test func float() throws { + let bytes = ByteArray() + bytes.writeFloat(Float.infinity) + #expect(bytes.position == ByteArray.sizeOfFloat) + bytes.position = 0 + #expect(try bytes.readFloat() == Float.infinity) + } + + @Test func double() throws { + let bytes = ByteArray() + bytes.writeDouble(.pi) + #expect(bytes.position == ByteArray.sizeOfDouble) + bytes.position = 0 + #expect(try bytes.readDouble() == Double.pi) + bytes.clear() + bytes.writeDouble(Double.infinity) + bytes.position = 0 + 
#expect(try bytes.readDouble() == Double.infinity) + } + + @Test func utf8() throws { + let bytes = ByteArray() + do { + try bytes.writeUTF8("hello world!!") + } catch { + Issue.record() + } + + let length: Int = bytes.position + bytes.position = 0 + #expect(try bytes.readUTF8() == "hello world!!") + bytes.position = 0 + + var raiseError = false + do { + let _: String = try bytes.readUTF8Bytes(length + 10) + } catch { + raiseError = true + } + + #expect(raiseError) + } +} diff --git a/Vendor/HaishinKit.swift/LICENSE.md b/Vendor/HaishinKit.swift/LICENSE.md new file mode 100644 index 000000000..575a2ee36 --- /dev/null +++ b/Vendor/HaishinKit.swift/LICENSE.md @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2015, shogo4405 +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/Vendor/HaishinKit.swift/MoQTHaishinKit/MoQTHaishinKit.h b/Vendor/HaishinKit.swift/MoQTHaishinKit/MoQTHaishinKit.h new file mode 100644 index 000000000..10e9d38c1 --- /dev/null +++ b/Vendor/HaishinKit.swift/MoQTHaishinKit/MoQTHaishinKit.h @@ -0,0 +1,3 @@ +#import +FOUNDATION_EXPORT double MoQTHaishinKitVersionNumber; +FOUNDATION_EXPORT const unsigned char MoQTHaishinKitVersionString[]; diff --git a/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/Constants.swift b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/Constants.swift new file mode 100644 index 000000000..c1ce97a91 --- /dev/null +++ b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/Constants.swift @@ -0,0 +1,3 @@ +import Logboard + +nonisolated(unsafe) let logger = LBLogger.with("com.haishinkit.SRTHaishinKit") diff --git a/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/ControlMessage/MoQTAnnounce.swift b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/ControlMessage/MoQTAnnounce.swift new file mode 100644 index 000000000..726036329 --- /dev/null +++ b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/ControlMessage/MoQTAnnounce.swift @@ -0,0 +1,28 @@ +import Foundation +import Logboard + +/// 6.21. 
ANNOUNCE +public struct MoQTAnnounce: MoQTControlMessage { + public let type = MoQTMessageType.announce + public let trackNamespace: [String] + public let subscribeParameters: [MoQTVersionSpecificParameter] + + public var payload: Data { + get throws { + var payload = MoQTPayload() + payload.putInt(trackNamespace.count) + for namespace in trackNamespace { + payload.putString(namespace) + } + payload.putInt(subscribeParameters.count) + for parameter in subscribeParameters { + do { + payload.putData(try parameter.payload) + } catch { + logger.info(error) + } + } + return payload.data + } + } +} diff --git a/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/ControlMessage/MoQTAnnounceError.swift b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/ControlMessage/MoQTAnnounceError.swift new file mode 100644 index 000000000..db1db97d3 --- /dev/null +++ b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/ControlMessage/MoQTAnnounceError.swift @@ -0,0 +1,29 @@ +import Foundation +import Logboard + +/// 6.21. ANNOUNCE +public struct MoQTAnnounceError: MoQTControlMessage, Swift.Error { + public let type = MoQTMessageType.announceError + public let trackNamespace: [String] + public let code: Int + public let reasonPhrase: String + + public var payload: Data { + get throws { + throw MoQTControlMessageError.notImplemented + } + } +} + +extension MoQTAnnounceError { + init(_ payload: inout MoQTPayload) throws { + let trackNamespaceCounts = try payload.getInt() + var trackNamespace: [String] = .init() + for _ in 0.. (any MoQTControlMessage)? 
{ + switch self { + case .subscribeUpdate: + return nil + case .subscribe: + return try MoQTSubscribe(&payload) + case .subscribeOk: + return try MoQTSubscribeOk(&payload) + case .subscribeError: + return try MoQTSubscribeError(&payload) + case .announce: + return nil + case .announceOk: + return try MoQTAnnounceOk(&payload) + case .announceError: + return try MoQTAnnounceError(&payload) + case .unannounce: + return nil + case .unsubscribe: + return nil + case .subscribeDone: + return nil + case .announceCancel: + return nil + case .trackStatusRequest: + return nil + case .trackStatus: + return nil + case .goaway: + return nil + case .subscribeAnnounuces: + return nil + case .subscribeAnnounucesOk: + return try MoQTSubscribeAnnouncesOk(&payload) + case .subscribeAnnounucesError: + return try MoQTSubscribeAnnouncesError(&payload) + case .clientSetup: + return nil + case .serverSetup: + return try MoQTServerSetup(&payload) + } + } +} + +enum MoQTControlMessageError: Swift.Error { + case notImplemented +} + +public protocol MoQTControlMessage: Sendable { + var type: MoQTMessageType { get } + var payload: Data { get throws } +} diff --git a/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/ControlMessage/MoQTGoaway.swift b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/ControlMessage/MoQTGoaway.swift new file mode 100644 index 000000000..f615c22a2 --- /dev/null +++ b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/ControlMessage/MoQTGoaway.swift @@ -0,0 +1,20 @@ +import Foundation + +public struct MoQTGoaway: MoQTControlMessage { + public let type: MoQTMessageType = .goaway + public let newSessionURI: String + + public var payload: Data { + get throws { + var payload = MoQTPayload() + payload.putString(newSessionURI) + return payload.data + } + } +} + +extension MoQTGoaway { + init(_ payload: inout MoQTPayload) throws { + newSessionURI = try payload.getString() + } +} diff --git a/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/ControlMessage/MoQTServerSetup.swift 
b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/ControlMessage/MoQTServerSetup.swift new file mode 100644 index 000000000..8aa84853e --- /dev/null +++ b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/ControlMessage/MoQTServerSetup.swift @@ -0,0 +1,29 @@ +import Foundation + +public struct MoQTServerSetup: MoQTControlMessage { + public let type: MoQTMessageType = .serverSetup + public let selectedVersion: Int + public let setupParameters: [MoQTSetupParameter] + + public var payload: Data { + get throws { + throw MoQTControlMessageError.notImplemented + } + } +} + +extension MoQTServerSetup { + init(_ payload: inout MoQTPayload) throws { + selectedVersion = try payload.getInt() + let setupParametersCounts = try payload.getInt() + var setupParameters: [MoQTSetupParameter] = .init() + for _ in 0...size) + } + } + + init(data: Data) { + let diff: Int = MemoryLayout.size - data.count + if 0 < diff { + var buffer = Data(repeating: 0, count: diff) + buffer.append(data) + self = buffer.withUnsafeBytes { $0.baseAddress!.assumingMemoryBound(to: Self.self).pointee } + return + } + self = data.withUnsafeBytes { $0.baseAddress!.assumingMemoryBound(to: Self.self).pointee } + } + + init(data: Slice) { + self.init(data: Data(data)) + } +} diff --git a/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/Extension/NWProtocolQUIC.Options+Extension.swift b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/Extension/NWProtocolQUIC.Options+Extension.swift new file mode 100644 index 000000000..0abfc0b39 --- /dev/null +++ b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/Extension/NWProtocolQUIC.Options+Extension.swift @@ -0,0 +1,12 @@ +import Network + +@available(iOS 16.0, macOS 13.0, tvOS 16.0, *) +extension NWProtocolQUIC.Options { + func verifySelfCert() -> NWProtocolQUIC.Options { + let securityProtocolOptions: sec_protocol_options_t = self.securityProtocolOptions + sec_protocol_options_set_verify_block(securityProtocolOptions, { (_: sec_protocol_metadata_t, _: sec_trust_t, complete: 
@escaping sec_protocol_verify_complete_t) in + complete(true) + }, .main) + return self + } +} diff --git a/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/MoQTConnection.swift b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/MoQTConnection.swift new file mode 100644 index 000000000..dab7a1b9a --- /dev/null +++ b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/MoQTConnection.swift @@ -0,0 +1,240 @@ +import Foundation + +@available(iOS 16.0, macOS 13.0, tvOS 16.0, *) +public actor MoQTConnection { + public static let defaultPort = 4433 + /// The supported protocols are moqt. + public static let supportedProtocols = ["moqt"] + /// The supported protocol versions. + public static let supportedVersions: [MoQTVersion] = [.draft07Exp2] + /// The default a control request time out value (ms). + public static let defaultRequestTimeout: UInt64 = 3000 + + /// The error domain code. + public enum Error: Swift.Error { + /// An invalid internal stare. + case invalidState + /// The command isn’t supported. + case unsupportedCommand(_ command: String) + /// The connected operation timed out. + case connectionTimedOut + /// The general socket error. + case socketErrorOccurred(_ error: any Swift.Error) + /// The requested operation timed out. + case requestTimedOut + case unknownResponse + } + + public let role: MoQTSetupRole + /// The control message request timeout value. Defaul value is 500 msec. + public let requestTimeout: UInt64 + + public var objectStream: AsyncStream { + AsyncStream { continuation in + self.objectStreamContinuation = continuation + } + } + + private var socket: MoQTSocket? + private var inputBuffer = MoQTPayload() + private var outputBuffer = MoQTPayload() + private var datagramBuffer = MoQTPayload() + private var continuation: CheckedContinuation? + private var currentTrackAlias = 0 + private var currentSubscribeId = 0 + private var objectStreamContinuation: AsyncStream.Continuation? + + /// Creates a new connection. 
+ public init(_ role: MoQTSetupRole, requestTimeOut: UInt64 = MoQTConnection.defaultRequestTimeout) { + self.role = .subscriber + self.requestTimeout = requestTimeOut + } + + /// Creates a two-way connection to an application on MoQT Server. + public func connect(_ uri: String) async throws -> MoQTServerSetup { + guard let uri = URL(string: uri), let scheme = uri.scheme, let host = uri.host, Self.supportedProtocols.contains(scheme) else { + throw Error.unsupportedCommand(uri) + } + socket = .init() + guard let socket else { + throw Error.invalidState + } + do { + try await socket.connect(host, port: uri.port ?? Self.defaultPort) + Task { + for await data in await socket.incoming { + await didReceiveControlMessage(data) + } + } + Task { + for await data in await socket.datagram { + await didReceiveDataStream(data) + } + } + guard let serverSetup = try await send(MoQTClientSetup(supportedVersions: Self.supportedVersions, role: role, path: uri.path())) as? MoQTServerSetup else { + throw Error.unknownResponse + } + return serverSetup + } catch { + logger.error(error) + throw error + } + } + + public func annouce(_ namespace: [String], authInfo: String?) async throws -> Result { + var subscribeParameters: [MoQTVersionSpecificParameter] = .init() + if let authInfo { + subscribeParameters.append(.init(key: .authorizationInfo, value: authInfo)) + } + let message = MoQTAnnounce(trackNamespace: namespace, subscribeParameters: subscribeParameters) + switch try await send(message) { + case let result as MoQTAnnounceOk: + return .success(result) + case let result as MoQTAnnounceError: + return .failure(result) + default: + throw Error.unknownResponse + } + } + + public func subscribe(_ namespace: [String], name: String, authInfo: String? 
= nil) async throws -> Result { + defer { + currentTrackAlias += 1 + currentSubscribeId += 1 + } + var subscribeParameters: [MoQTVersionSpecificParameter] = .init() + if let authInfo { + subscribeParameters.append(.init(key: .authorizationInfo, value: authInfo)) + } + let message = MoQTSubscribe( + subscribeId: currentSubscribeId, + trackAlias: currentTrackAlias, + trackNamespace: namespace, + trackName: name, + subscribePriority: 0, + groupOrder: .descending, + filterType: .latestGroup, + startGroup: nil, + startObject: nil, + endGroup: nil, + endObject: nil, + subscribeParameters: subscribeParameters + ) + switch try await send(message) { + case let result as MoQTSubscribeOk: + return .success(result) + case let result as MoQTSubscribeError: + return .failure(result) + default: + throw Error.unknownResponse + } + } + + public func subscribeAnnouces(_ namespace: [String], authInfo: String? = nil) async throws -> Result { + var subscribeParameters: [MoQTVersionSpecificParameter] = .init() + if let authInfo { + subscribeParameters.append(.init(key: .authorizationInfo, value: authInfo)) + } + let message = MoQTSubscribeAnnounces( + trackNamespacePrefix: namespace, + parameters: subscribeParameters + ) + switch try await send(message) { + case let result as MoQTSubscribeAnnouncesOk: + return .success(result) + case let result as MoQTSubscribeAnnouncesError: + return .failure(result) + default: + throw Error.unknownResponse + } + } + + /// Closes the connection from the server. 
+ public func close() async { + await socket?.close() + } + + public func send(_ objects: [MoQTObject], header: MoQTStreamHeaderSubgroup) async throws { + var buffer = MoQTPayload() + buffer.putData(try header.payload) + for object in objects { + buffer.putData(try object.payload) + } + buffer.position = 0 + await socket?.sendDatagram(buffer.data) + } + + private func send(_ message: some MoQTControlMessage) async throws -> any MoQTControlMessage { + let content = try message.payload + outputBuffer.position = 0 + outputBuffer.putInt(message.type.rawValue) + outputBuffer.putInt(content.count) + outputBuffer.putData(content) + return try await withCheckedThrowingContinuation { continutation in + self.continuation = continutation + Task { + try? await Task.sleep(nanoseconds: requestTimeout * 1_000_000) + self.continuation.map { + $0.resume(throwing: Error.requestTimedOut) + } + self.continuation = nil + } + Task { + await socket?.send(outputBuffer.data) + } + } + } + + private func didReceiveControlMessage(_ data: Data) async { + do { + inputBuffer.position = 0 + inputBuffer.putData(data) + inputBuffer.position = 0 + let type = try inputBuffer.getInt() + let length = try inputBuffer.getInt() + guard let message = try MoQTMessageType(rawValue: type)?.makeMessage(&inputBuffer) else { + _ = try? inputBuffer.getData(length) + continuation?.resume(throwing: MoQTControlMessageError.notImplemented) + continuation = nil + return + } + switch message { + case let message as MoQTSubscribe: + let ok = MoQTSubscribeOk( + subscribeId: currentSubscribeId, + expires: 0, + groupOrder: message.groupOrder, + contentExists: true, + largestGroupId: 0, + largestObjectId: 0, + subscribeParameters: message.subscribeParameters) + _ = try? 
await send(ok) + default: + continuation?.resume(returning: message) + continuation = nil + } + } catch { + logger.warn(error, data.bytes) + } + } + + private func didReceiveDataStream(_ data: Data) async { + do { + datagramBuffer.position = 0 + datagramBuffer.putData(data) + datagramBuffer.position = 0 + let type = try datagramBuffer.getInt() + switch MoQTDataStreamType(rawValue: type) { + case .streamHeaderSubgroup: + _ = try MoQTStreamHeaderSubgroup(&datagramBuffer) + while 0 < datagramBuffer.bytesAvailable { + objectStreamContinuation?.yield(try .init(&datagramBuffer)) + } + default: + break + } + } catch { + logger.warn(error) + } + } +} diff --git a/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/MoQTPayload.swift b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/MoQTPayload.swift new file mode 100644 index 000000000..5e0f817d5 --- /dev/null +++ b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/MoQTPayload.swift @@ -0,0 +1,133 @@ +import Foundation + +struct MoQTPayload { + private(set) var data = Data() + + enum Error: Swift.Error { + case eof + case outOfRange + } + + /// Specifies the length of buffer. + var length: Int { + get { + data.count + } + set { + switch true { + case (data.count < newValue): + data.append(Data(count: newValue - data.count)) + case (newValue < data.count): + data = data.subdata(in: 0.. Self { + if value <= 63 { + return putData(UInt8(value).bigEndian.data) + } + if value <= 16383 { + return putData((UInt16(value) | 0x4000).bigEndian.data) + } + if value <= 1073741823 { + return putData((UInt32(value) | 0x80000000).bigEndian.data) + } + return putData((UInt64(value) | 0xc000000000000000).bigEndian.data) + } + + mutating func getInt() throws -> Int { + guard 1 <= bytesAvailable else { + throw Error.eof + } + switch Int(data[position] >> 6) { + case 0: + defer { + position += 1 + } + return Int(data: data[position.. 
Self { + putInt(value.utf8.count) + putData(Data(value.utf8)) + return self + } + + mutating func getString() throws -> String { + let length = try getInt() + let data = try getData(length) + return String(data: data, encoding: .utf8) ?? "" + } + + mutating func putBool(_ value: Bool) -> Self { + putData(Data([value ? 1 : 0])) + return self + } + + mutating func getBool() throws -> Bool { + guard 1 <= bytesAvailable else { + throw Error.eof + } + let value = try getData(1) + return value[0] == 1 + } + + @discardableResult + mutating func putData(_ value: Data) -> Self { + if position == data.count { + data.append(value) + position = data.count + return self + } + let length = min(data.count - position, value.count) + data.replaceSubrange(position.. Data { + guard length <= bytesAvailable else { + throw Error.eof + } + position += length + return data.subdata(in: position - length.. { + AsyncStream { continuation in + self.incomingContinuation = continuation + } + } + + var datagram: AsyncStream { + AsyncStream { continuation in + self.datagramContinuation = continuation + } + } + + private var timeout: UInt64 = 15 + private var connected = false + private var windowSizeC = MoQTSocket.defaultWindowSizeC + private var totalBytesIn = 0 + private var queueBytesOut = 0 + private var totalBytesOut = 0 + private var connection: NWConnection? { + didSet { + connection?.stateUpdateHandler = { state in + Task { await self.stateDidChange(to: state) } + } + connection?.viabilityUpdateHandler = { viability in + Task { await self.viabilityDidChange(to: viability) } + } + } + } + private var options: NWProtocolQUIC.Options = .init() + private var outputs: AsyncStream.Continuation? + private var connectionGroup: NWConnectionGroup? 
{ + didSet { + connectionGroup?.newConnectionHandler = { connection in + Task { await self.newConnection(connection) } + } + oldValue?.newConnectionHandler = nil + oldValue?.stateUpdateHandler = nil + } + } + private var continuation: CheckedContinuation? + private var qualityOfService: DispatchQoS = .userInitiated + private var incomingContinuation: AsyncStream.Continuation? { + didSet { + if let connection, let incomingContinuation { + receive(on: connection, continuation: incomingContinuation) + } + } + } + private var datagramContinuation: AsyncStream.Continuation? + private lazy var networkQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.MoQSocket.network", qos: qualityOfService) + + func connect(_ name: String, port: Int) async throws { + guard !connected else { + throw Error.invalidState + } + totalBytesIn = 0 + totalBytesOut = 0 + queueBytesOut = 0 + do { + let options = NWProtocolQUIC.Options(alpn: Self.alpn).verifySelfCert() + let endpoint = NWEndpoint.hostPort(host: .init(name), port: .init(integerLiteral: NWEndpoint.Port.IntegerLiteralType(port))) + connection = NWConnection(to: endpoint, using: NWParameters(quic: options)) + options.isDatagram = true + connectionGroup = NWConnectionGroup(with: NWMultiplexGroup(to: endpoint), using: NWParameters(quic: options)) + try await withCheckedThrowingContinuation { (checkedContinuation: CheckedContinuation) in + self.continuation = checkedContinuation + Task { + try? await Task.sleep(nanoseconds: timeout * 1_000_000_000) + guard let continuation else { + return + } + continuation.resume(throwing: Error.connectionTimedOut) + self.continuation = nil + close() + } + connection?.start(queue: networkQueue) + } + } catch { + throw error + } + } + + func send(_ data: Data) { + guard connected else { + return + } + queueBytesOut += data.count + outputs?.yield(data) + } + + func sendDatagram(_ data: Data) { + connectionGroup?.send(content: data) { _ in + } + } + + func close(_ error: NWError? 
= nil) { + guard connection != nil else { + return + } + if let continuation { + continuation.resume(throwing: Error.connectionNotEstablished(error)) + self.continuation = nil + } + connected = false + outputs = nil + connection = nil + continuation = nil + } + + private func newConnection(_ connection: NWConnection) { + receive(on: connection, continuation: datagramContinuation) + connection.start(queue: networkQueue) + } + + private nonisolated func receive(on connection: NWConnection, continuation: AsyncStream.Continuation?) { + connection.receive(minimumIncompleteLength: 0, maximumLength: 65558) { content, _, _, _ in + if let content { + continuation?.yield(content) + self.receive(on: connection, continuation: continuation) + } + } + } + + private func stateDidChange(to state: NWConnection.State) { + switch state { + case .ready: + logger.info("Connection is ready.") + connected = true + let (stream, continuation) = AsyncStream.makeStream() + Task { + for await data in stream where connected { + try await send(data) + totalBytesOut += data.count + queueBytesOut -= data.count + } + } + self.outputs = continuation + self.connectionGroup?.start(queue: networkQueue) + self.continuation?.resume() + self.continuation = nil + case .waiting(let error): + logger.warn("Connection waiting:", error) + close(error) + case .setup: + logger.debug("Connection is setting up.") + case .preparing: + logger.debug("Connection is preparing.") + case .failed(let error): + logger.warn("Connection failed:", error) + close(error) + case .cancelled: + logger.info("Connection cancelled.") + close() + @unknown default: + logger.error("Unknown connection state.") + } + } + + private func viabilityDidChange(to viability: Bool) { + logger.info("Connection viability changed to ", viability) + if viability == false { + close() + } + } + + private func send(_ data: Data) async throws { + return try await withCheckedThrowingContinuation { continuation in + guard let connection else { + 
continuation.resume(throwing: Error.invalidState) + return + } + connection.send(content: data, completion: .contentProcessed { error in + if let error { + continuation.resume(throwing: error) + return + } + continuation.resume() + }) + } + } +} + +@available(iOS 16.0, macOS 13.0, tvOS 16.0, *) +extension MoQTSocket: NetworkTransportReporter { + // MARK: NetworkTransportReporter + func makeNetworkMonitor() async -> NetworkMonitor { + return .init(self) + } + + func makeNetworkTransportReport() -> NetworkTransportReport { + return .init(queueBytesOut: queueBytesOut, totalBytesIn: totalBytesIn, totalBytesOut: totalBytesOut) + } +} diff --git a/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/MoQTVersion.swift b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/MoQTVersion.swift new file mode 100644 index 000000000..6deb2bad1 --- /dev/null +++ b/Vendor/HaishinKit.swift/MoQTHaishinKit/Sources/MoQTVersion.swift @@ -0,0 +1,12 @@ +public enum MoQTVersion: Int, Sendable { + case draft01 = 0xff000001 + case draft02 = 0xff000002 + case draft03 = 0xff000003 + case draft04 = 0xff000004 + case draft05 = 0xff000005 + case draft06 = 0xff000006 + case draft07 = 0xff000007 + + case draft07Exp = 0xff070001 + case draft07Exp2 = 0xff070002 +} diff --git a/Vendor/HaishinKit.swift/MoQTHaishinKit/Tests/MoQTPayLoadTests.swift b/Vendor/HaishinKit.swift/MoQTHaishinKit/Tests/MoQTPayLoadTests.swift new file mode 100644 index 000000000..6b97cac7c --- /dev/null +++ b/Vendor/HaishinKit.swift/MoQTHaishinKit/Tests/MoQTPayLoadTests.swift @@ -0,0 +1,14 @@ +import Foundation +@testable import MoQTHaishinKit +import Testing + +@Suite struct MoQTPayLoadTests { + @Test func putInt() throws { + var payload = MoQTPayload() + payload.putInt(MoQTVersion.draft04.rawValue) + #expect(payload.data == Data([192, 0, 0, 0, 255, 0, 0, 4])) + payload.position = 0 + #expect(try payload.getInt() == MoQTVersion.draft04.rawValue) + #expect(payload.position == 8) + } +} diff --git 
a/Vendor/HaishinKit.swift/MoQTHaishinKit/Tests/MoQTStreamHeaderSubgroupTests.swift b/Vendor/HaishinKit.swift/MoQTHaishinKit/Tests/MoQTStreamHeaderSubgroupTests.swift new file mode 100644 index 000000000..2718184bd --- /dev/null +++ b/Vendor/HaishinKit.swift/MoQTHaishinKit/Tests/MoQTStreamHeaderSubgroupTests.swift @@ -0,0 +1,19 @@ +import Foundation +@testable import MoQTHaishinKit +import Testing + +@Suite struct MoQTStreamHeaderSubgroupTests { + @Test func parse() throws { + var payload = MoQTPayload() + payload.putData(Data([4, 64, 99, 0, 129, 184, 103, 39, 0, 0, 0, 17, 50, 48, 50, 52, 45, 49, 49, 45, 49, 54, 32, 49, 52, 58, 50, 55, 58, 1, 1, 48, 2, 1, 49, 3, 1, 50, 4, 1, 51, 5, 1, 52, 6, 1, 53, 7, 1, 54, 8, 1, 55, 9, 1, 56, 10, 1, 57, 11, 2, 49, 48, 12, 2, 49, 49, 13, 2, 49, 50, 14, 2, 49, 51, 15, 2, 49, 52, 16, 2, 49, 53, 17, 2, 49, 54, 18, 2, 49, 55, 19, 2, 49, 56, 20, 2, 49, 57, 21, 2, 50, 48, 22, 2, 50, 49, 23, 2, 50, 50, 24, 2, 50, 51, 25, 2, 50, 52, 26, 2, 50, 53, 27, 2, 50, 54, 28, 2, 50, 55, 29, 2, 50, 56, 30, 2, 50, 57, 31, 2, 51, 48, 32, 2, 51, 49, 33, 2, 51, 50, 34, 2, 51, 51, 35, 2, 51, 52, 36, 2, 51, 53, 37, 2, 51, 54, 38, 2, 51, 55, 39, 2, 51, 56, 40, 2, 51, 57, 41, 2, 52, 48, 42, 2, 52, 49, 43, 2, 52, 50, 44, 2, 52, 51, 45, 2, 52, 52, 46, 2, 52, 53, 47, 2, 52, 54, 48, 2, 52, 55, 49, 2, 52, 56, 50, 2, 52, 57, 51, 2, 53, 48])) + payload.position = 1 + let message = try MoQTStreamHeaderSubgroup(&payload) + #expect(message.trackAlias == 99) + #expect(message.groupId == 0) + var objects: [MoQTObject] = .init() + while 0 < payload.bytesAvailable { + objects.append(try MoQTObject(&payload)) + } + #expect(objects.last?.id == 51) + } +} diff --git a/Vendor/HaishinKit.swift/Package.resolved b/Vendor/HaishinKit.swift/Package.resolved new file mode 100644 index 000000000..4ed8b61e4 --- /dev/null +++ b/Vendor/HaishinKit.swift/Package.resolved @@ -0,0 +1,33 @@ +{ + "originHash" : "0e215ed38a2c303f72c8fca29cc88c3ca1dfd9563d350725303c79aad85ae387", + "pins" : [ + 
{ + "identity" : "logboard", + "kind" : "remoteSourceControl", + "location" : "https://github.com/shogo4405/Logboard.git", + "state" : { + "revision" : "8f41c63afb903040b77049ee2efa8c257b8c0d50", + "version" : "2.6.0" + } + }, + { + "identity" : "swift-docc-plugin", + "kind" : "remoteSourceControl", + "location" : "https://github.com/swiftlang/swift-docc-plugin", + "state" : { + "revision" : "3e4f133a77e644a5812911a0513aeb7288b07d06", + "version" : "1.4.5" + } + }, + { + "identity" : "swift-docc-symbolkit", + "kind" : "remoteSourceControl", + "location" : "https://github.com/swiftlang/swift-docc-symbolkit", + "state" : { + "revision" : "b45d1f2ed151d057b54504d653e0da5552844e34", + "version" : "1.0.0" + } + } + ], + "version" : 3 +} diff --git a/Vendor/HaishinKit.swift/Package.swift b/Vendor/HaishinKit.swift/Package.swift new file mode 100644 index 000000000..fb08bcb73 --- /dev/null +++ b/Vendor/HaishinKit.swift/Package.swift @@ -0,0 +1,106 @@ +// swift-tools-version:6.0 +// The swift-tools-version declares the minimum version of Swift required to build this package. 
+import PackageDescription + +#if swift(<6) +let swiftSettings: [SwiftSetting] = [ + .enableExperimentalFeature("ExistentialAny"), + .enableExperimentalFeature("StrictConcurrency") +] +#else +let swiftSettings: [SwiftSetting] = [ + .enableUpcomingFeature("ExistentialAny") +] +#endif + +let package = Package( + name: "HaishinKit", + platforms: [ + .iOS(.v15), + .tvOS(.v15), + .macCatalyst(.v15), + .macOS(.v12), + .visionOS(.v1) + ], + products: [ + .library(name: "HaishinKit", targets: ["HaishinKit"]), + .library(name: "RTMPHaishinKit", targets: ["RTMPHaishinKit"]), + .library(name: "SRTHaishinKit", targets: ["SRTHaishinKit"]), + .library(name: "MoQTHaishinKit", targets: ["MoQTHaishinKit"]), + .library(name: "RTCHaishinKit", targets: ["RTCHaishinKit"]) + ], + dependencies: [ + .package(url: "https://github.com/swiftlang/swift-docc-plugin", from: "1.4.5"), + .package(url: "https://github.com/shogo4405/Logboard.git", "2.6.0"..<"2.7.0") + ], + targets: [ + .binaryTarget( + name: "libsrt", + url: "https://github.com/HaishinKit/libsrt-xcframework/releases/download/v1.5.4/libsrt.xcframework.zip", + checksum: "76879e2802e45ce043f52871a0a6764d57f833bdb729f2ba6663f4e31d658c4a" + ), + .binaryTarget( + name: "libdatachannel", + url: "https://github.com/HaishinKit/libdatachannel-xcframework/releases/download/v0.24.0/libdatachannel.xcframework.zip", + checksum: "52163eed2c9d652d913b20d1fd5a1925c5982b1dcdf335fd916c72ffa385bb26" + ), + .target( + name: "HaishinKit", + dependencies: ["Logboard"], + path: "HaishinKit/Sources", + swiftSettings: swiftSettings + ), + .target( + name: "RTMPHaishinKit", + dependencies: ["HaishinKit"], + path: "RTMPHaishinKit/Sources", + swiftSettings: swiftSettings + ), + .target( + name: "SRTHaishinKit", + dependencies: ["libsrt", "HaishinKit"], + path: "SRTHaishinKit/Sources", + swiftSettings: swiftSettings + ), + .target( + name: "MoQTHaishinKit", + dependencies: ["HaishinKit"], + path: "MoQTHaishinKit/Sources", + swiftSettings: swiftSettings + ), + 
.target( + name: "RTCHaishinKit", + dependencies: ["libdatachannel", "HaishinKit"], + path: "RTCHaishinKit/Sources", + swiftSettings: swiftSettings + ), + .testTarget( + name: "HaishinKitTests", + dependencies: ["HaishinKit"], + path: "HaishinKit/Tests", + resources: [ + .process("Asset") + ], + swiftSettings: swiftSettings + ), + .testTarget( + name: "RTMPHaishinKitTests", + dependencies: ["RTMPHaishinKit"], + path: "RTMPHaishinKit/Tests", + swiftSettings: swiftSettings + ), + .testTarget( + name: "SRTHaishinKitTests", + dependencies: ["SRTHaishinKit"], + path: "SRTHaishinKit/Tests", + swiftSettings: swiftSettings + ), + .testTarget( + name: "RTCHaishinKitTests", + dependencies: ["RTCHaishinKit"], + path: "RTCHaishinKit/Tests", + swiftSettings: swiftSettings + ) + ], + swiftLanguageModes: [.v6, .v5] +) diff --git a/Vendor/HaishinKit.swift/README.md b/Vendor/HaishinKit.swift/README.md new file mode 100644 index 000000000..98c2a47b3 --- /dev/null +++ b/Vendor/HaishinKit.swift/README.md @@ -0,0 +1,107 @@ +# HaishinKit for iOS, macOS, tvOS, visionOS and [Android](https://github.com/HaishinKit/HaishinKit.kt). 
+[![GitHub Stars](https://img.shields.io/github/stars/HaishinKit/HaishinKit.swift?style=social)](https://github.com/HaishinKit/HaishinKit.swift/stargazers) +[![Release](https://img.shields.io/github/v/release/HaishinKit/HaishinKit.swift)](https://github.com/HaishinKit/HaishinKit.swift/releases/latest) +[![Platform Compatibility](https://img.shields.io/endpoint?url=https%3A%2F%2Fswiftpackageindex.com%2Fapi%2Fpackages%2FHaishinKit%2FHaishinKit.swift%2Fbadge%3Ftype%3Dplatforms)](https://swiftpackageindex.com/HaishinKit/HaishinKit.swift) +[![Swift Compatibility](https://img.shields.io/endpoint?url=https%3A%2F%2Fswiftpackageindex.com%2Fapi%2Fpackages%2FHaishinKit%2FHaishinKit.swift%2Fbadge%3Ftype%3Dswift-versions)](https://swiftpackageindex.com/HaishinKit/HaishinKit.swift) +[![GitHub license](https://img.shields.io/badge/License-BSD%203--Clause-blue.svg)](https://raw.githubusercontent.com/HaishinKit/HaishinKit.swift/master/LICENSE.md) +[![GitHub Sponsor](https://img.shields.io/static/v1?label=Sponsor&message=%E2%9D%A4&logo=GitHub&color=ff69b4)](https://github.com/sponsors/shogo4405) + +* Camera and Microphone streaming library via RTMP and SRT for iOS, macOS, tvOS and visionOS. +* 10th Anniversary🎖️In development for 10 years, with 2,778 commits and 163 releases. Thank you. Since Aug 2, 2015. + +## 💖 Sponsors +Do you need additional support? Technical support on Issues and Discussions is provided only to contributors and academic researchers of HaishinKit. By becoming a sponsor, I can provide the support you need. + +Sponsor: [$50 per month](https://github.com/sponsors/shogo4405): Technical support via GitHub Issues/Discussions with priority response. + +## 🎨 Features +- **Protocols** ✨Publish and playback feature are available [RTMP](RTMPHaishinKit/Sources/Docs.docc/index.md), [SRT](SRTHaishinKit/Sources/Docs.docc/index.md) and [WHEP/WHIP(alpha)](RTCHaishinKit/Sources/Docs.docc/index.md). 
+- **Multi Camera access** ✨[Support multitasking camera access.](https://developer.apple.com/documentation/avkit/accessing-the-camera-while-multitasking-on-ipad) +- **Multi Streaming** ✨Allowing live streaming to separate services. Views also support this, enabling the verification of raw video data. +- **Strict Concurrency** ✨Supports Swift's Strict Concurrency compliance. +- **Screen Capture** ✨Supports ReplayKit(iOS) and ScreenCaptureKit(macOS) api. +- **Video mixing** ✨Possible to display any text or bitmap on a video during broadcasting or viewing. This allows for various applications such as watermarking and time display. + |Publish|Playback| + |:---:|:---:| + ||| + +## 🌏 Requirements + +### Development +|Version|Xcode|Swift| +|:----:|:----:|:----:| +|2.2.0+|26.0+|6.0+| +|2.1.0+|16.4+|6.0+| + +### OS +|iOS|tvOS|Mac Catalyst|macOS|visionOS|watchOS| +|:-:|:-:|:-:|:-:|:-:|:-:| +|15.0+|15.0+|15.0+|12.0+|1.0+|-| + +- SRTHaishinKit is not avaliable for Mac Catalyst. + +## 📖 Getting Started + +> [!IMPORTANT] +> There are several issues that occur when connected to Xcode. Please also refer to [this document](https://github.com/HaishinKit/HaishinKit.swift/blob/main/HaishinKit/Sources/Docs.docc/known-issue.md). + +### 🔧 Examples +- Reference implementation app for live streaming `publish` and `playback`. +- If an issue occurs, please check whether it also happens in the examples app. + +#### Usage + +You can verify by changing the URL of the following file. +https://github.com/HaishinKit/HaishinKit.swift/blob/abf1883d25d0ba29e1d1d67ea9e3a3b5be61a196/Examples/Preference.swift#L1-L7 + +#### Download +```sh +git clone https://github.com/HaishinKit/HaishinKit.swift.git +cd HaishinKit.swift +open Examples/Examples.xcodeproj +``` + +### 🔧 Installation +#### Using Swift Package Manager +```sh +https://github.com/HaishinKit/HaishinKit.swift +``` + +### 🔧 Prerequisites + +#### AVAudioSession +Make sure you setup and activate your AVAudioSession iOS. 
+ +```swift +import AVFoundation + +let session = AVAudioSession.sharedInstance() +do { + try session.setCategory(.playAndRecord, mode: .default, options: [.defaultToSpeaker, .allowBluetooth]) + try session.setActive(true) +} catch { + print(error) +} +``` + +### 🔧 Cocoa Keys +Please make sure to contains `Info.plist` the following values when accessing the camera or microphone. +```xml +NSCameraUsageDescription +your usage description here +NSMicrophoneUsageDescription +your usage description here +``` + +## 📃 Documentation +- [API Documentation](https://docs.haishinkit.com/swift/latest/documentation/) +- [Migration Guide](https://github.com/HaishinKit/HaishinKit.swift/wiki#-migration-guide) + +## 🌏 Related projects +Project name |Notes |License +----------------|------------|-------------- +[HaishinKit for Android.](https://github.com/HaishinKit/HaishinKit.kt)|Camera and Microphone streaming library via RTMP for Android.|[BSD 3-Clause "New" or "Revised" License](https://github.com/HaishinKit/HaishinKit.kt/blob/master/LICENSE.md) +[HaishinKit for Flutter.](https://github.com/HaishinKit/HaishinKit.dart)|Camera and Microphone streaming library via RTMP for Flutter.|[BSD 3-Clause "New" or "Revised" License](https://github.com/HaishinKit/HaishinKit.dart/blob/master/LICENSE.md) + +## 📜 License +BSD-3-Clause diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Docs.docc/index.md b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Docs.docc/index.md new file mode 100644 index 000000000..cf3245970 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Docs.docc/index.md @@ -0,0 +1,27 @@ +# ``RTCHaishinKit`` +This module supports WHIP/WHEP protocols. + +## 🔍 Overview +RTCHaishinKit is WHIP/WHEP protocols stack in Swift. It internally uses a library that is built from [libdatachannel](https://github.com/paullouisageneau/libdatachannel) and converted into an xcframework. + +## 🎨 Features +- Publish(WHIP) + - H264 and OPUS support. 
+- Playback(WHEP) + - H264 and OPUS support. + +## 📓 Usage +### Logging +- Defining a Swift wrapper method for `rtcInitLogger`. +```swift +await RTCLogger.shared.setLevel(.debug) +``` + +### Session +Currently designed to work with the Session API. +```swift +import RTCHaishinKit + +await SessionBuilderFactory.shared.register(HTTPSessionFactory()) +``` + diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Extension/Array+Extension.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Extension/Array+Extension.swift new file mode 100644 index 000000000..8997b9e37 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Extension/Array+Extension.swift @@ -0,0 +1,32 @@ +import Foundation + +extension Array where Element == String { + func withCStrings(_ body: ([UnsafePointer]) -> R) -> R { + var cStringPtrs: [UnsafePointer] = [] + cStringPtrs.reserveCapacity(count) + func loop(_ i: Int, _ current: [UnsafePointer], _ body: ([UnsafePointer]) -> R) -> R { + if i == count { + return body(current) + } + return self[i].withCString { cstr in + var next = current + next.append(cstr) + return loop(i + 1, next, body) + } + } + return loop(0, [], body) + } + + func withCStringArray(_ body: (UnsafeMutablePointer?>) -> R) -> R { + let cStrings = self.map { $0.utf8CString } + let pointerArray = UnsafeMutablePointer?>.allocate(capacity: cStrings.count) + for (i, cString) in cStrings.enumerated() { + cString.withUnsafeBufferPointer { buf in + pointerArray[i] = buf.baseAddress + } + } + let result = body(pointerArray) + pointerArray.deallocate() + return result + } +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Extension/AudioCodecSettings.Format+Extension.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Extension/AudioCodecSettings.Format+Extension.swift new file mode 100644 index 000000000..717ebb412 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Extension/AudioCodecSettings.Format+Extension.swift @@ -0,0 +1,15 @@ +import 
HaishinKit +import libdatachannel + +extension AudioCodecSettings.Format { + var cValue: rtcCodec? { + switch self { + case .opus: + return RTC_CODEC_OPUS + case .aac: + return RTC_CODEC_AAC + case .pcm: + return nil + } + } +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Extension/VideoCodecSettings.Format+Extension.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Extension/VideoCodecSettings.Format+Extension.swift new file mode 100644 index 000000000..87b2194e7 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Extension/VideoCodecSettings.Format+Extension.swift @@ -0,0 +1,13 @@ +import HaishinKit +import libdatachannel + +extension VideoCodecSettings.Format { + var cValue: rtcCodec { + switch self { + case .h264: + return RTC_CODEC_H264 + case .hevc: + return RTC_CODEC_H265 + } + } +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/HTTP/HTTPSession.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/HTTP/HTTPSession.swift new file mode 100644 index 000000000..dd2dabc7e --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/HTTP/HTTPSession.swift @@ -0,0 +1,142 @@ +import Foundation +import HaishinKit + +actor HTTPSession: Session { + var connected: Bool { + get async { + peerConnection?.connectionState == .connected + } + } + + @AsyncStreamed(.closed) + private(set) var readyState: AsyncStream + + var stream: any StreamConvertible { + _stream + } + + private let uri: URL + private var location: URL? + private var maxRetryCount: Int = 0 + private var _stream = RTCStream() + private var mode: SessionMode + private var configuration: HTTPSessionConfiguration? + private var peerConnection: RTCPeerConnection? + + init(uri: URL, mode: SessionMode, configuration: (any SessionConfiguration)?) { + logger.level = .debug + self.uri = uri + self.mode = mode + if let configuration = configuration as? 
HTTPSessionConfiguration { + self.configuration = configuration + } + } + + func setMaxRetryCount(_ maxRetryCount: Int) { + self.maxRetryCount = maxRetryCount + } + + func connect(_ disconnected: @Sendable @escaping () -> Void) async throws { + guard _readyState.value == .closed else { + return + } + _readyState.value = .connecting + let peerConnection = try makePeerConnection() + switch mode { + case .publish: + let audioSettings = await _stream.audioSettings + try peerConnection.addTrack(AudioStreamTrack(audioSettings), stream: _stream) + let videoSettings = await _stream.videoSettings + try peerConnection.addTrack(VideoStreamTrack(videoSettings), stream: _stream) + case .playback: + await _stream.setDirection(.recvonly) + try peerConnection.addTransceiver(.audio, stream: _stream) + try peerConnection.addTransceiver(.video, stream: _stream) + } + do { + self.peerConnection = peerConnection + try peerConnection.setLocalDesciption(.offer) + let answer = try await requestOffer(uri, offer: peerConnection.createOffer()) + try peerConnection.setRemoteDesciption(answer, type: .answer) + _readyState.value = .open + } catch { + logger.warn(error) + await _stream.close() + peerConnection.close() + _readyState.value = .closed + throw error + } + } + + func close() async throws { + guard let location, _readyState.value == .open else { + return + } + _readyState.value = .closing + var request = URLRequest(url: location) + request.httpMethod = "DELETE" + request.addValue("application/sdp", forHTTPHeaderField: "Content-Type") + _ = try await URLSession.shared.data(for: request) + await _stream.close() + peerConnection?.close() + self.location = nil + _readyState.value = .closed + } + + private func requestOffer(_ url: URL, offer: String) async throws -> String { + logger.debug(offer) + var request = URLRequest(url: url) + request.httpMethod = "POST" + request.addValue("application/sdp", forHTTPHeaderField: "Content-Type") + request.httpBody = offer.data(using: .utf8) + let 
(data, response) = try await URLSession.shared.data(for: request)
+        if let response = response as? HTTPURLResponse {
+            if let location = response.allHeaderFields["Location"] as? String {
+                if location.hasPrefix("http") {
+                    self.location = URL(string: location)
+                } else {
+                    var baseURL = "\(url.scheme ?? "http")://\(url.host ?? "")"
+                    if let port = url.port {
+                        baseURL += ":\(port)"
+                    }
+                    self.location = URL(string: "\(baseURL)\(location)")
+                }
+            }
+        }
+        return String(data: data, encoding: .utf8) ?? ""
+    }
+
+    private func makePeerConnection() throws -> RTCPeerConnection {
+        let connection = try RTCPeerConnection(configuration)
+        connection.delegate = self
+        return connection
+    }
+}
+
+extension HTTPSession: RTCPeerConnectionDelegate {
+    // MARK: RTCPeerConnectionDelegate
+    nonisolated func peerConnection(_ peerConnection: RTCPeerConnection, connectionStateChanged state: RTCPeerConnection.ConnectionState) {
+        Task {
+            if state == .connected {
+                if await mode == .publish {
+                    await _stream.setDirection(.sendonly)
+                }
+            }
+        }
+    }
+
+    nonisolated func peerConnection(_ peerConnection: RTCPeerConnection, signalingStateChanged signalingState: RTCPeerConnection.SignalingState) {
+    }
+
+    nonisolated func peerConnection(_ peerConnection: RTCPeerConnection, iceConnectionStateChanged iceConnectionState: RTCPeerConnection.IceConnectionState) {
+    }
+
+    nonisolated func peerConnection(_ peerConnection: RTCPeerConnection, iceGatheringStateChanged gatheringState: RTCPeerConnection.IceGatheringState) {
+    }
+
+    nonisolated func peerConnection(_ peerConnection: RTCPeerConnection, gotIceCandidate candidate: RTCIceCandidate) {
+    }
+
+    nonisolated func peerConnection(_ peerConnection: RTCPeerConnection, didOpen dataChannel: RTCDataChannel) {
+    }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/HTTP/HTTPSessionConfiguration.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/HTTP/HTTPSessionConfiguration.swift
new file mode 100644
index 000000000..20ee716c7
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/HTTP/HTTPSessionConfiguration.swift @@ -0,0 +1,20 @@ +import HaishinKit + +/// A configuration object that defines options for an HTTPSession. +/// +/// The properties of this structure are internally converted into +/// an `RTCConfiguration` and applied when creating the underlying +/// `RTCPeerConnection`. +/// +public struct HTTPSessionConfiguration: SessionConfiguration, RTCConfigurationConvertible { + public var iceServers: [String] = [] + public var bindAddress: String? + public var certificateType: RTCCertificateType? + public var iceTransportPolicy: RTCTransportPolicy? + public var isIceUdpMuxEnabled: Bool = false + public var isAutoNegotionEnabled: Bool = true + public var isForceMediaTransport: Bool = false + public var portRange: Range? + public var mtu: Int32? + public var maxMesasgeSize: Int32? +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/HTTP/HTTPSessionFactory.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/HTTP/HTTPSessionFactory.swift new file mode 100644 index 000000000..02e21e8d7 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/HTTP/HTTPSessionFactory.swift @@ -0,0 +1,13 @@ +import Foundation +import HaishinKit + +public struct HTTPSessionFactory: SessionFactory { + public let supportedProtocols: Set = ["http", "https"] + + public init() { + } + + public func make(_ uri: URL, mode: SessionMode, configuration: (any SessionConfiguration)?) 
-> any Session { + return HTTPSession(uri: uri, mode: mode, configuration: configuration) + } +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCCertificateType.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCCertificateType.swift new file mode 100644 index 000000000..67da5c56f --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCCertificateType.swift @@ -0,0 +1,20 @@ +import libdatachannel + +public enum RTCCertificateType: Sendable, Encodable { + case `default` + case ECDSA + case RSA +} + +extension RTCCertificateType { + var cValue: rtcCertificateType { + switch self { + case .default: + return RTC_CERTIFICATE_DEFAULT + case .ECDSA: + return RTC_CERTIFICATE_ECDSA + case .RSA: + return RTC_CERTIFICATE_RSA + } + } +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCChannel.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCChannel.swift new file mode 100644 index 000000000..fb8edf765 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCChannel.swift @@ -0,0 +1,17 @@ +import Foundation +import HaishinKit +import libdatachannel + +protocol RTCChannel { + var id: Int32 { get } + + func send(_ message: Data) throws +} + +extension RTCChannel { + public func send(_ message: Data) throws { + try RTCError.check(message.withUnsafeBytes { pointer in + return rtcSendMessage(id, pointer.bindMemory(to: CChar.self).baseAddress, Int32(message.count)) + }) + } +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCConfiguration.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCConfiguration.swift new file mode 100644 index 000000000..dab56faa9 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCConfiguration.swift @@ -0,0 +1,101 @@ +import Foundation +import libdatachannel + +public protocol RTCConfigurationConvertible: Sendable { + /// A list of ICE server URLs used to establish the connection. 
+ var iceServers: [String] { get } + /// The local IP address to bind sockets to. + var bindAddress: String? { get } + /// The type of certificate to generate for DTLS handshakes. + var certificateType: RTCCertificateType? { get } + /// The ICE transport policy that controls how candidates are gathered. + var iceTransportPolicy: RTCTransportPolicy? { get } + /// A Boolean value that indicates whether ICE UDP multiplexing is enabled. + var isIceUdpMuxEnabled: Bool { get } + /// A Boolean value that indicates whether negotiation is performed automatically. + var isAutoNegotionEnabled: Bool { get } + /// A Boolean value that forces the use of media transport even for data sessions. + var isForceMediaTransport: Bool { get } + /// The port range available for allocating ICE candidates. + var portRange: Range? { get } + /// The maximum transmission unit (MTU) for outgoing packets. + var mtu: Int32? { get } + /// The maximum message size allowed for data channels. + var maxMesasgeSize: Int32? { get } +} + +extension RTCConfigurationConvertible { + func createPeerConnection() -> Int32 { + return iceServers.withCStringArray { cIceServers in + return [bindAddress ?? 
""].withCStrings { cStrings in + var config = rtcConfiguration() + if !iceServers.isEmpty { + config.iceServers = cIceServers + config.iceServersCount = Int32(iceServers.count) + } + if bindAddress != nil { + config.bindAddress = cStrings[0] + } + if let certificateType { + config.certificateType = certificateType.cValue + } + if let iceTransportPolicy { + config.iceTransportPolicy = iceTransportPolicy.cValue + } + config.enableIceUdpMux = isIceUdpMuxEnabled + config.disableAutoNegotiation = !isAutoNegotionEnabled + config.forceMediaTransport = isForceMediaTransport + if let portRange { + config.portRangeBegin = portRange.lowerBound + config.portRangeEnd = portRange.upperBound + } + if let mtu { + config.mtu = mtu + } + if let maxMesasgeSize { + config.maxMessageSize = maxMesasgeSize + } + return rtcCreatePeerConnection(&config) + } + } + } +} + +public struct RTCConfiguration: RTCConfigurationConvertible { + static let empty = RTCConfiguration() + + public let iceServers: [String] + public let bindAddress: String? + public let certificateType: RTCCertificateType? + public let iceTransportPolicy: RTCTransportPolicy? + public let isIceUdpMuxEnabled: Bool + public let isAutoNegotionEnabled: Bool + public let isForceMediaTransport: Bool + public let portRange: Range? + public let mtu: Int32? + public let maxMesasgeSize: Int32? + + public init( + iceServers: [String] = [], + bindAddress: String? = nil, + certificateType: RTCCertificateType? = nil, + iceTransportPolicy: RTCTransportPolicy? = nil, + isIceUdpMuxEnabled: Bool = false, + isAutoNegotionEnabled: Bool = true, + isForceMediaTransport: Bool = false, + portRange: Range? = nil, + mtu: Int32? = nil, + maxMesasgeSize: Int32? 
= nil + ) { + self.iceServers = iceServers + self.bindAddress = bindAddress + self.certificateType = certificateType + self.iceTransportPolicy = iceTransportPolicy + self.isIceUdpMuxEnabled = isIceUdpMuxEnabled + self.isAutoNegotionEnabled = isAutoNegotionEnabled + self.isForceMediaTransport = isForceMediaTransport + self.portRange = portRange + self.mtu = mtu + self.maxMesasgeSize = maxMesasgeSize + } +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCDataChannel.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCDataChannel.swift new file mode 100644 index 000000000..9d572db2b --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCDataChannel.swift @@ -0,0 +1,121 @@ +import Foundation +import libdatachannel + +/// Delegate for receiving RTCDataChannel events. +public protocol RTCDataChannelDelegate: AnyObject { + /// Called when the readyState of the data channel changes. + /// - Parameters: + /// - dataChannel: The RTCDataChannel instance. + /// - readyState: The updated readyState. + func dataChannel(_ dataChannel: RTCDataChannel, readyStateChanged readyState: RTCDataChannel.ReadyState) + + /// Called when a binary message is received. + /// - Parameters: + /// - dataChannel: The RTCDataChannel instance. + /// - message: The received binary data. + func dataChannel(_ dataChannel: RTCDataChannel, didReceiveMessage message: Data) + + /// Called when a text message is received. + /// - Parameters: + /// - dataChannel: The RTCDataChannel instance. + /// - message: The received text message. + func dataChannel(_ dataChannel: RTCDataChannel, didReceiveMessage message: String) +} + +public final class RTCDataChannel: RTCChannel { + /// Represents the ready state of an RTCDataChannel. + public enum ReadyState { + /// The data channel is being created and the connection is in progress. + case connecting + /// The data channel is fully established and ready to send and receive messages. 
+        case open
+        /// The data channel is in the process of closing.
+        case closing
+        /// The data channel has been closed and can no longer be used.
+        case closed
+    }
+
+    public weak var delegate: (any RTCDataChannelDelegate)?
+
+    /// The label.
+    public var label: String {
+        do {
+            return try CUtil.getString { buffer, size in
+                rtcGetDataChannelLabel(id, buffer, size)
+            }
+        } catch {
+            logger.warn(error)
+            return ""
+        }
+    }
+
+    /// The stream id.
+    public var stream: Int {
+        Int(rtcGetDataChannelStream(id))
+    }
+
+    public private(set) var readyState: ReadyState = .connecting {
+        didSet {
+            delegate?.dataChannel(self, readyStateChanged: readyState)
+        }
+    }
+
+    let id: Int32
+
+    init(id: Int32) throws {
+        self.id = id
+        try RTCError.check(id)
+        do {
+            try RTCError.check(rtcSetOpenCallback(id) { _, pointer in
+                guard let pointer else { return }
+                Unmanaged<RTCDataChannel>.fromOpaque(pointer).takeUnretainedValue().readyState = .open
+            })
+            try RTCError.check(rtcSetClosedCallback(id) { _, pointer in
+                guard let pointer else { return }
+                Unmanaged<RTCDataChannel>.fromOpaque(pointer).takeUnretainedValue().readyState = .closed
+            })
+            try RTCError.check(rtcSetMessageCallback(id) { _, bytes, size, pointer in
+                guard let bytes, let pointer else { return }
+                if 0 <= size {
+                    let data = Data(bytes: bytes, count: Int(size))
+                    Unmanaged<RTCDataChannel>.fromOpaque(pointer).takeUnretainedValue().didReceiveMessage(data)
+                } else {
+                    Unmanaged<RTCDataChannel>.fromOpaque(pointer).takeUnretainedValue().didReceiveMessage(String(cString: bytes))
+                }
+            })
+            try RTCError.check(rtcSetErrorCallback(id) { _, error, pointer in
+                guard let error, let pointer else { return }
+                Unmanaged<RTCDataChannel>.fromOpaque(pointer).takeUnretainedValue().errorOccurred(String(cString: error))
+            })
+            rtcSetUserPointer(id, Unmanaged.passUnretained(self).toOpaque())
+        } catch {
+            rtcDeleteDataChannel(id)
+            throw error
+        }
+    }
+
+    deinit {
+        rtcDeleteDataChannel(id)
+    }
+
+    public func send(_ message: String) throws {
+        // A negative size tells libdatachannel to treat the payload as a
+        // null-terminated string; withCString guarantees the terminator,
+        // which Data(using: .utf8) does not.
+        try RTCError.check(message.withCString { cString in
+            return rtcSendMessage(id, cString, -1)
+        })
+    }
+
+    private func errorOccurred(_ error: String) {
+        logger.warn(error)
+    }
+
+    private func didReceiveMessage(_ message: Data) {
+        delegate?.dataChannel(self, didReceiveMessage: message)
+    }
+
+    private func didReceiveMessage(_ message: String) {
+        delegate?.dataChannel(self, didReceiveMessage: message)
+    }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCDirection.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCDirection.swift
new file mode 100644
index 000000000..6e591be77
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCDirection.swift
@@ -0,0 +1,24 @@
+import libdatachannel
+
+public enum RTCDirection: Sendable {
+    case unknown
+    case sendrecv
+    case sendonly
+    case recvonly
+    case inactive
+
+    var cValue: rtcDirection {
+        switch self {
+        case .unknown:
+            return RTC_DIRECTION_UNKNOWN
+        case .sendrecv:
+            return RTC_DIRECTION_SENDRECV
+        case .sendonly:
+            return RTC_DIRECTION_SENDONLY
+        case .recvonly:
+            return RTC_DIRECTION_RECVONLY
+        case .inactive:
+            return RTC_DIRECTION_INACTIVE
+        }
+    }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCError.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCError.swift
new file mode 100644
index 000000000..52ffe6a7a
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCError.swift
@@ -0,0 +1,47 @@
+public enum RTCError: RawRepresentable, Swift.Error {
+    @discardableResult
+    static func check(_ result: Int32) throws -> Int32 {
+        if result < 0 {
+            throw RTCError(rawValue: result)
+        }
+        return result
+    }
+
+    public typealias RawValue = Int32
+
+    case invalid
+    case failure
+    case notAvail
+    case tooSmall
+    case undefined(value: Int32)
+
+    public var rawValue: Int32 {
+        switch self {
+        case .invalid:
+            return -1
+        case .failure:
+            return -2
+        case .notAvail:
+            return 
-3 + case .tooSmall: + return -4 + case .undefined(let value): + return value + } + } + + public init(rawValue: Int32) { + switch rawValue { + case -1: + self = .invalid + case -2: + self = .failure + case -3: + self = .notAvail + case -4: + self = .tooSmall + default: + self = .undefined(value: rawValue) + } + } +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCIceCandidate.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCIceCandidate.swift new file mode 100644 index 000000000..54b3b9604 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCIceCandidate.swift @@ -0,0 +1,21 @@ +import Foundation + +public struct RTCIceCandidate: Sendable { + public let candidate: String + public let mid: String +} + +extension RTCIceCandidate { + init(candidate: UnsafePointer?, mid: UnsafePointer?) { + if let candidate { + self.candidate = String(cString: candidate) + } else { + self.candidate = "" + } + if let mid { + self.mid = String(cString: mid) + } else { + self.mid = "" + } + } +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCLogger.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCLogger.swift new file mode 100644 index 000000000..d4236c903 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCLogger.swift @@ -0,0 +1,52 @@ +import libdatachannel + +/// An actor for writing interpolated string messages to `libdatachannel` logging system. +public actor RTCLogger { + /// Defines the logging severity levels supported by `libdatachannel`. + public enum Level { + /// No logs will be emitted. + case none + /// Fatal errors. + case fatal + /// Recoverable errors. + case error + /// Potential issues that should be noted. + case warning + /// General informational messages. + case info + /// Debug messages for development and troubleshooting. + case debug + /// Verbose messages for detailed tracing. 
+        case verbose
+
+        var cValue: rtcLogLevel {
+            switch self {
+            case .none:
+                return RTC_LOG_NONE
+            case .fatal:
+                return RTC_LOG_FATAL
+            case .error:
+                return RTC_LOG_ERROR
+            case .warning:
+                return RTC_LOG_WARNING
+            case .info:
+                return RTC_LOG_INFO
+            case .debug:
+                return RTC_LOG_DEBUG
+            case .verbose:
+                return RTC_LOG_VERBOSE
+            }
+        }
+    }
+
+    /// The singleton logger instance.
+    public static let shared = RTCLogger()
+
+    /// The current logging level.
+    public private(set) var level: Level = .none
+
+    /// Sets the current logging level.
+    public func setLevel(_ level: Level) {
+        self.level = level; rtcInitLogger(level.cValue, nil)
+    }
+}
diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCPeerConnection.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCPeerConnection.swift
new file mode 100644
index 000000000..158704e83
--- /dev/null
+++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCPeerConnection.swift
@@ -0,0 +1,467 @@
+import AVFAudio
+import CoreMedia
+import Foundation
+import libdatachannel
+
+public protocol RTCPeerConnectionDelegate: AnyObject {
+    func peerConnection(_ peerConnection: RTCPeerConnection, connectionStateChanged connectionState: RTCPeerConnection.ConnectionState)
+    func peerConnection(_ peerConnection: RTCPeerConnection, iceGatheringStateChanged iceGatheringState: RTCPeerConnection.IceGatheringState)
+    func peerConnection(_ peerConnection: RTCPeerConnection, iceConnectionStateChanged iceConnectionState: RTCPeerConnection.IceConnectionState)
+    func peerConnection(_ peerConnection: RTCPeerConnection, signalingStateChanged signalingState: RTCPeerConnection.SignalingState)
+    func peerConnection(_ peerConnection: RTCPeerConnection, didOpen dataChannel: RTCDataChannel)
+    func peerConnection(_ peerConnection: RTCPeerConnection, gotIceCandidate candidate: RTCIceCandidate)
+}
+
+public final class RTCPeerConnection {
+    /// Represents the state of a connection. 
+ public enum ConnectionState: Sendable { + /// The connection has been created, but no connection attempt has started yet. + case new + /// A connection attempt is currently in progress. + case connecting + /// The connection has been successfully established. + case connected + /// The connection was previously established but is now temporarily lost. + case disconnected + /// The connection has encountered an unrecoverable error. + case failed + /// The connection has been closed and will not be used again. + case closed + } + + /// Represents the ICE gathering state of an RTCPeerConnection. + public enum IceGatheringState: Sendable { + /// ICE gathering has not yet started. + case new + /// The agent is currently gathering ICE candidates. + case inProgress + /// ICE gathering has finished. No more candidates will be gathered. + case complete + } + + /// Represents the state of the ICE connection for an RTCPeerConnection. + public enum IceConnectionState: Sendable { + /// The ICE agent is newly created and no checks have started yet. + case new + /// The ICE agent is checking candidate pairs to find a workable connection. + case checking + /// A usable ICE connection has been established. + case connected + /// ICE checks have completed successfully, and the connection is fully stable. + case completed + /// The ICE connection has failed and cannot recover. + case failed + /// The ICE connection has been lost or interrupted. + case disconnected + /// The ICE agent has been closed and will not be used again. + case closed + } + + /// Represents the signaling state of an RTCPeerConnection. + public enum SignalingState: Sendable { + /// The signaling state is stable; there is no outstanding local or remote offer. + case stable + /// A local offer has been created and set as the local description. + case haveLocalOffer + /// A remote offer has been received and set as the remote description. 
+ case haveRemoteOffer + /// A provisional (pr-answer) has been set as the local description. + case haveLocalPRAnswer + /// A provisional (pr-answer) has been set as the remote description. + case haveRemotePRAnswer + } + + static let audioMediaDescription = """ +m=audio 9 UDP/TLS/RTP/SAVPF 111 +a=mid:0 +a=recvonly +a=rtpmap:111 opus/48000/2 +a=fmtp:111 minptime=10;useinbandfec=1;stereo=1;sprop-stereo=1 +""" + + static let videoMediaDescription = """ +m=video 9 UDP/TLS/RTP/SAVPF 98 +a=mid:1 +a=recvonly +a=rtpmap:98 H264/90000 +a=rtcp-fb:98 goog-remb +a=rtcp-fb:98 nack +a=rtcp-fb:98 nack pli +a=fmtp:98 level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42e01f +""" + + static let bufferSize: Int = 1024 * 16 + + /// Specifies the delegate of an RTCPeerConnection. + public weak var delegate: (any RTCPeerConnectionDelegate)? + /// The current state of connection. + public private(set) var connectionState: ConnectionState = .new { + didSet { + guard connectionState != oldValue else { + return + } + delegate?.peerConnection(self, connectionStateChanged: connectionState) + } + } + /// The current state of ice connection. + public private(set) var iceConnectionState: IceConnectionState = .new { + didSet { + guard iceConnectionState != oldValue else { + return + } + delegate?.peerConnection(self, iceConnectionStateChanged: iceConnectionState) + } + } + /// The current state of ice gathering. + public private(set) var iceGatheringState: IceGatheringState = .new { + didSet { + guard iceGatheringState != oldValue else { + return + } + delegate?.peerConnection(self, iceGatheringStateChanged: iceGatheringState) + } + } + /// The current state of signaling. + public private(set) var signalingState: SignalingState = .stable { + didSet { + guard signalingState != oldValue else { + return + } + delegate?.peerConnection(self, signalingStateChanged: signalingState) + } + } + /// Optional callback for receiving compressed video directly from opened tracks. 
+ /// + /// When set, video tracks will deliver compressed `CMSampleBuffer`s to this callback + /// instead of routing through `IncomingStream`. Audio tracks still use `incomingStream`. + /// This enables the caller to handle video decode and PTS retiming externally + /// (matching the pattern used by RTMP/RTSP ingest paths). + public var onCompressedVideo: ((CMSampleBuffer) -> Void)? + + private let connection: Int32 + private(set) var localDescription: String = "" + private weak var incomingStream: RTCStream? + private var managedTrackIds: Set = [] + private var retainedTracks: [RTCTrack] = [] + private var callbackDelegates: [Any] = [] + + /// The current local SDP generated by the peer connection. + /// + /// This is updated asynchronously after calling `setLocalDesciption(_:)`. + public var localDescriptionSdp: String { + localDescription + } + + /// Creates a peerConnection instance. + public init(_ config: (some RTCConfigurationConvertible)? = nil) throws { + if let config { + connection = config.createPeerConnection() + } else { + connection = RTCConfiguration.empty.createPeerConnection() + } + try RTCError.check(connection) + do { + try RTCError.check(rtcSetLocalDescriptionCallback(connection) { _, sdp, _, pointer in + guard let pointer else { return } + if let sdp { + Unmanaged.fromOpaque(pointer).takeUnretainedValue().localDescription = String(cString: sdp) + } + }) + try RTCError.check(rtcSetLocalCandidateCallback(connection) { _, candidate, mid, pointer in + guard let pointer else { return } + Unmanaged.fromOpaque(pointer).takeUnretainedValue().didGenerateCandidate(.init( + candidate: candidate, + mid: mid + )) + }) + try RTCError.check(rtcSetStateChangeCallback(connection) { _, state, pointer in + guard let pointer else { return } + if let state = ConnectionState(cValue: state) { + Unmanaged.fromOpaque(pointer).takeUnretainedValue().connectionState = state + } + }) + try RTCError.check(rtcSetIceStateChangeCallback(connection) { _, state, pointer in + 
guard let pointer else { return } + if let state = IceConnectionState(cValue: state) { + Unmanaged.fromOpaque(pointer).takeUnretainedValue().iceConnectionState = state + } + }) + try RTCError.check(rtcSetGatheringStateChangeCallback(connection) { _, gatheringState, pointer in + guard let pointer else { return } + if let gatheringState = IceGatheringState(cValue: gatheringState) { + Unmanaged.fromOpaque(pointer).takeUnretainedValue().iceGatheringState = gatheringState + } + }) + try RTCError.check(rtcSetSignalingStateChangeCallback(connection) { _, signalingState, pointer in + guard let pointer else { return } + if let signalingState = SignalingState(cValue: signalingState) { + Unmanaged.fromOpaque(pointer).takeUnretainedValue().signalingState = signalingState + } + }) + try RTCError.check(rtcSetTrackCallback(connection) { _, track, pointer in + guard let pointer else { return } + let pc = Unmanaged.fromOpaque(pointer).takeUnretainedValue() + // If this track ID was already created in addTransceiver, skip creating a + // duplicate RTCTrack. Creating a second RTCTrack for the same ID would + // overwrite libdatachannel callbacks and then deallocate, deleting the track. + guard !pc.managedTrackIds.contains(track) else { + return + } + if let newTrack = try? RTCTrack(id: track) { + pc.retainedTracks.append(newTrack) + pc.didOpenTrack(newTrack) + } + }) + try RTCError.check(rtcSetDataChannelCallback(connection) { _, dataChannel, pointer in + guard let pointer else { return } + if let channel = try? RTCDataChannel(id: dataChannel) { + Unmanaged.fromOpaque(pointer).takeUnretainedValue().didOpenDataChannel(channel) + } + }) + rtcSetUserPointer(connection, Unmanaged.passUnretained(self).toOpaque()) + } catch { + rtcDeletePeerConnection(connection) + throw error + } + } + + /// Attaches an ``RTCStream`` to receive incoming media. + /// + /// When remote tracks open, they will be bound to the stream so decoded + /// audio/video can be forwarded via the stream's outputs. 
+ public func attachIncomingStream(_ stream: RTCStream) { + incomingStream = stream + } + + deinit { + close() + rtcDeletePeerConnection(connection) + } + + /// Adds a `MediaStreamTrack` to the peer connection and associates it with the given `MediaStream`. + /// + /// - Parameters: + /// - track: The media track to add (audio or video). + /// - stream: The `MediaStream` that the track belongs to. + public func addTrack(_ track: some RTCStreamTrack, stream: RTCStream) throws { + let msid = stream.id + switch track { + case let track as AudioStreamTrack: + let config = RTCTrackConfiguration(mid: "0", streamId: msid, audioCodecSettings: track.settings) + let id = try config.addTrack(connection, direction: .sendrecv) + Task { + await stream.addTrack(try RTCSendableStreamTrack(id, id: track.id)) + } + case let track as VideoStreamTrack: + let config = RTCTrackConfiguration(mid: "1", streamId: msid, videoCodecSettings: track.settings) + let id = try config.addTrack(connection, direction: .sendrecv) + Task { + await stream.addTrack(try RTCSendableStreamTrack(id, id: track.id)) + } + default: + break + } + } + + /// Adds a recvonly transceiver for the given kind, and binds it to the stream. + /// + /// This is used for receiving media from a remote publisher (ingest). + /// The track is retained internally to prevent deallocation (which would call rtcDeleteTrack). 
+ public func addRecvonlyTransceiver(_ kind: RTCStreamKind, stream: RTCStream) throws { + let track = try addTransceiver(kind, stream: stream) + retainedTracks.append(track) + } + + @discardableResult + func addTransceiver(_ kind: RTCStreamKind, stream: RTCStream) throws -> RTCTrack { + let sdp: String + switch kind { + case .audio: + sdp = Self.audioMediaDescription + case .video: + sdp = Self.videoMediaDescription + } + let result = try RTCError.check(sdp.withCString { cString in + rtcAddTrack(connection, cString) + }) + managedTrackIds.insert(result) + let track = try RTCTrack(id: result) + track.delegate = stream + return track + } + + public func setRemoteDesciption(_ sdp: String, type: SDPSessionDescriptionType) throws { + logger.debug(sdp, type.rawValue) + try RTCError.check([sdp, type.rawValue].withCStrings { cStrings in + rtcSetRemoteDescription(connection, cStrings[0], cStrings[1]) + }) + } + + /// Adds a trickled remote ICE candidate. + /// + /// - Parameters: + /// - candidate: SDP candidate line (with or without the `a=` prefix). + /// - mid: Optional mid value. Pass `nil` to let libdatachannel autodetect. + public func addRemoteCandidate(_ candidate: String, mid: String? = nil) throws { + try RTCError.check([candidate, mid ?? 
""].withCStrings { cStrings in + if mid == nil { + return rtcAddRemoteCandidate(connection, cStrings[0], nil) + } else { + return rtcAddRemoteCandidate(connection, cStrings[0], cStrings[1]) + } + }) + } + + public func setLocalDesciption(_ type: SDPSessionDescriptionType) throws { + logger.debug(type.rawValue) + try RTCError.check([type.rawValue].withCStrings { cStrings in + rtcSetLocalDescription(connection, cStrings[0]) + }) + } + + public func createOffer() throws -> String { + return try CUtil.getString { buffer, size in + rtcCreateOffer(connection, buffer, size) + } + } + + public func createAnswer() throws -> String { + return try CUtil.getString { buffer, size in + rtcCreateAnswer(connection, buffer, size) + } + } + + public func createDataChannel(_ label: String) throws -> RTCDataChannel { + let result = try RTCError.check([label].withCStrings { cStrings in + rtcCreateDataChannel(connection, cStrings[0]) + }) + return try RTCDataChannel(id: result) + } + + public func close() { + do { + try RTCError.check(rtcClosePeerConnection(connection)) + } catch { + logger.warn(error) + } + } + + private func didGenerateCandidate(_ candidated: RTCIceCandidate) { + delegate?.peerConnection(self, gotIceCandidate: candidated) + } + + private func didOpenTrack(_ track: RTCTrack) { + logger.info(track) + // Route video tracks to the external callback (if set) for direct decode, + // and audio tracks to the RTCStream/IncomingStream path. + if let onCompressedVideo, track.description.lowercased().contains("m=video") { + let delegate = VideoCallbackTrackDelegate(onCompressedVideo) + callbackDelegates.append(delegate) + track.delegate = delegate + } else if let incomingStream { + track.delegate = incomingStream + } + } + + private func didOpenDataChannel(_ dataChannel: RTCDataChannel) { + delegate?.peerConnection(self, didOpen: dataChannel) + } +} + +/// Routes compressed video from an RTCTrack directly to a callback, +/// bypassing IncomingStream/VideoCodec/MediaLink. 
+private class VideoCallbackTrackDelegate: RTCTrackDelegate { + let callback: (CMSampleBuffer) -> Void + + init(_ callback: @escaping (CMSampleBuffer) -> Void) { + self.callback = callback + } + + func track(_ track: RTCTrack, readyStateChanged readyState: RTCTrack.ReadyState) {} + + func track(_ track: RTCTrack, didOutput buffer: CMSampleBuffer) { + callback(buffer) + } + + func track(_ track: RTCTrack, didOutput buffer: AVAudioCompressedBuffer, when: AVAudioTime) { + // Audio is handled by IncomingStream via the RTCStream path. + } +} + +extension RTCPeerConnection.ConnectionState { + init?(cValue: rtcState) { + switch cValue { + case RTC_NEW: + self = .new + case RTC_CONNECTING: + self = .connecting + case RTC_CONNECTED: + self = .connected + case RTC_DISCONNECTED: + self = .disconnected + case RTC_FAILED: + self = .failed + case RTC_CLOSED: + self = .closed + default: + return nil + } + } +} + +extension RTCPeerConnection.IceGatheringState { + init?(cValue: rtcGatheringState) { + switch cValue { + case RTC_GATHERING_NEW: + self = .new + case RTC_GATHERING_INPROGRESS: + self = .inProgress + case RTC_GATHERING_COMPLETE: + self = .complete + default: + return nil + } + } +} + +extension RTCPeerConnection.IceConnectionState { + init?(cValue: rtcIceState) { + switch cValue { + case RTC_ICE_NEW: + self = .new + case RTC_ICE_CHECKING: + self = .checking + case RTC_ICE_CONNECTED: + self = .connected + case RTC_ICE_COMPLETED: + self = .completed + case RTC_ICE_FAILED: + self = .failed + case RTC_ICE_DISCONNECTED: + self = .disconnected + case RTC_ICE_CLOSED: + self = .closed + default: + return nil + } + } +} + +extension RTCPeerConnection.SignalingState { + init?(cValue: rtcSignalingState) { + switch cValue { + case RTC_SIGNALING_STABLE: + self = .stable + case RTC_SIGNALING_HAVE_LOCAL_OFFER: + self = .haveLocalOffer + case RTC_SIGNALING_HAVE_REMOTE_OFFER: + self = .haveRemoteOffer + case RTC_SIGNALING_HAVE_LOCAL_PRANSWER: + self = .haveLocalPRAnswer + case 
RTC_SIGNALING_HAVE_REMOTE_PRANSWER: + self = .haveRemotePRAnswer + default: + return nil + } + } +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCSendableStreamTrack.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCSendableStreamTrack.swift new file mode 100644 index 000000000..bf2daa07a --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCSendableStreamTrack.swift @@ -0,0 +1,25 @@ +import AVFoundation +import HaishinKit +import libdatachannel + +actor RTCSendableStreamTrack: RTCStreamTrack { + let id: String + private let track: RTCTrack + + init(_ tid: Int32, id: String) throws { + track = try RTCTrack(id: tid) + self.id = id + } + + func send(_ buffer: CMSampleBuffer) { + track.send(buffer) + } + + func send(_ buffer: AVAudioCompressedBuffer, when: AVAudioTime) { + track.send(buffer, when: when) + } + + func setDelegate(_ delegate: some RTCTrackDelegate) { + track.delegate = delegate + } +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCStream.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCStream.swift new file mode 100644 index 000000000..5ce2be587 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCStream.swift @@ -0,0 +1,186 @@ +import AVFoundation +import HaishinKit +import libdatachannel + +public actor RTCStream { + enum Error: Swift.Error { + case unsupportedCodec + } + + static let supportedAudioCodecs: [AudioCodecSettings.Format] = [.opus] + static let supportedVideoCodecs: [VideoCodecSettings.Format] = [.h264] + + let id: String = UUID().uuidString + private(set) var tracks: [RTCSendableStreamTrack] = [] + public private(set) var readyState: StreamReadyState = .idle + public private(set) var videoTrackId: UInt8? = UInt8.max + public private(set) var audioTrackId: UInt8? 
= UInt8.max + package lazy var incoming = IncomingStream(self) + package lazy var outgoing: OutgoingStream = { + var stream = OutgoingStream() + stream.audioSettings = .init(channelMap: [0, 0], format: .opus) + return stream + }() + package var outputs: [any StreamOutput] = [] + package var bitRateStrategy: (any StreamBitRateStrategy)? + private var direction: RTCDirection = .sendonly + + public init() { + } + + public func addOutput(_ output: any StreamOutput) { + outputs.append(output) + } + + public func removeAllOutputs() { + outputs.removeAll() + } + + public func setDirection(_ direction: RTCDirection) { + self.direction = direction + switch direction { + case .recvonly: + Task { + await incoming.startRunning() + } + case .sendonly, .sendrecv: + outgoing.startRunning() + Task { + for await audio in outgoing.audioOutputStream { + append(audio.0, when: audio.1) + } + } + Task { + for await video in outgoing.videoOutputStream { + append(video) + } + } + Task { + for await video in outgoing.videoInputStream { + outgoing.append(video: video) + } + } + default: + break + } + } + + public func close() async { + tracks.removeAll() + switch direction { + case .sendonly: + outgoing.stopRunning() + case .recvonly: + Task { + await incoming.stopRunning() + } + default: + break + } + } + + func addTrack(_ track: RTCSendableStreamTrack) async { + await track.setDelegate(self) + tracks.append(track) + } +} + +extension RTCStream: _Stream { + public func setAudioSettings(_ audioSettings: AudioCodecSettings) throws { + guard Self.supportedAudioCodecs.contains(audioSettings.format) else { + throw Error.unsupportedCodec + } + outgoing.audioSettings = audioSettings + } + + public func setVideoSettings(_ videoSettings: VideoCodecSettings) throws { + guard Self.supportedVideoCodecs.contains(videoSettings.format) else { + throw Error.unsupportedCodec + } + outgoing.videoSettings = videoSettings + } + + public func append(_ sampleBuffer: CMSampleBuffer) { + switch 
sampleBuffer.formatDescription?.mediaType { + case .video: + if sampleBuffer.formatDescription?.isCompressed == true { + Task { + for track in tracks { + await track.send(sampleBuffer) + } + } + } else { + outgoing.append(sampleBuffer) + outputs.forEach { $0.stream(self, didOutput: sampleBuffer) } + } + case .audio: + if sampleBuffer.formatDescription?.isCompressed == true { + Task { await incoming.append(sampleBuffer) } + } else { + outgoing.append(sampleBuffer) + } + default: + break + } + } + + public func append(_ audioBuffer: AVAudioBuffer, when: AVAudioTime) { + switch audioBuffer { + case let audioBuffer as AVAudioPCMBuffer: + outgoing.append(audioBuffer, when: when) + outputs.forEach { $0.stream(self, didOutput: audioBuffer, when: when) } + case let audioBuffer as AVAudioCompressedBuffer: + Task { + for track in tracks { + await track.send(audioBuffer, when: when) + } + } + default: + break + } + } + + public func dispatch(_ event: NetworkMonitorEvent) async { + await bitRateStrategy?.adjustBitrate(event, stream: self) + } +} + +extension RTCStream: RTCTrackDelegate { + // MARK: RTCTrackDelegate + nonisolated func track(_ track: RTCTrack, readyStateChanged readyState: RTCTrack.ReadyState) { + } + + nonisolated func track(_ track: RTCTrack, didOutput buffer: CMSampleBuffer) { + Task { + await incoming.append(buffer) + } + } + + nonisolated func track(_ track: RTCTrack, didOutput buffer: AVAudioCompressedBuffer, when: AVAudioTime) { + Task { + await incoming.append(buffer, when: when) + } + } +} + +extension RTCStream: MediaMixerOutput { + // MARK: MediaMixerOutput + public func selectTrack(_ id: UInt8?, mediaType: CMFormatDescription.MediaType) { + switch mediaType { + case .audio: + audioTrackId = id + case .video: + videoTrackId = id + default: + break + } + } + + nonisolated public func mixer(_ mixer: MediaMixer, didOutput sampleBuffer: CMSampleBuffer) { + Task { await append(sampleBuffer) } + } + + nonisolated public func mixer(_ mixer: MediaMixer, 
didOutput buffer: AVAudioPCMBuffer, when: AVAudioTime) { + Task { await append(buffer, when: when) } + } +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCStreamKind.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCStreamKind.swift new file mode 100644 index 000000000..63c7301cf --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCStreamKind.swift @@ -0,0 +1,4 @@ +public enum RTCStreamKind { + case audio + case video +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCStreamTrack.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCStreamTrack.swift new file mode 100644 index 000000000..51a07885f --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCStreamTrack.swift @@ -0,0 +1,29 @@ +import AVFAudio +import CoreMedia +import Foundation +import HaishinKit +import libdatachannel + +public protocol RTCStreamTrack: Sendable { + var id: String { get } +} + +public struct AudioStreamTrack: RTCStreamTrack, Sendable { + public let id: String + public let settings: AudioCodecSettings + + public init(_ settings: AudioCodecSettings) { + self.id = UUID().uuidString + self.settings = settings + } +} + +public struct VideoStreamTrack: RTCStreamTrack, Sendable { + public let id: String + public let settings: VideoCodecSettings + + public init(_ settings: VideoCodecSettings) { + self.id = UUID().uuidString + self.settings = settings + } +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCTrack.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCTrack.swift new file mode 100644 index 000000000..ce9ee4ebc --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCTrack.swift @@ -0,0 +1,180 @@ +import AVFAudio +import CoreMedia +import Foundation +import libdatachannel + +protocol RTCTrackDelegate: AnyObject { + func track(_ track: RTCTrack, readyStateChanged readyState: RTCTrack.ReadyState) + func track(_ track: RTCTrack, didOutput buffer: 
CMSampleBuffer) + func track(_ track: RTCTrack, didOutput buffer: AVAudioCompressedBuffer, when: AVAudioTime) +} + +class RTCTrack: RTCChannel { + enum ReadyState { + case connecting + case open + case closing + case closed + } + + let id: Int32 + weak var delegate: (any RTCTrackDelegate)? + + var mid: String { + do { + return try CUtil.getString { buffer, size in + rtcGetTrackMid(id, buffer, size) + } + } catch { + logger.warn(error) + return "" + } + } + + var description: String { + do { + return try CUtil.getString { buffer, size in + rtcGetTrackDescription(id, buffer, size) + } + } catch { + logger.warn(error) + return "" + } + } + + var ssrc: UInt32 { + do { + return try CUtil.getUInt32 { buffer, size in + rtcGetSsrcsForTrack(id, buffer, size) + } + } catch { + logger.warn(error) + return 0 + } + } + + private(set) var readyState: ReadyState = .connecting { + didSet { + switch readyState { + case .connecting: + break + case .open: + do { + packetizer = try makePacketizer() + } catch { + logger.warn(error) + } + case .closing: + break + case .closed: + break + } + delegate?.track(self, readyStateChanged: readyState) + } + } + + private var packetizer: (any RTPPacketizer)? 
+ + init(id: Int32) throws { + self.id = id + try RTCError.check(id) + do { + rtcSetUserPointer(id, Unmanaged.passUnretained(self).toOpaque()) + try RTCError.check(rtcSetOpenCallback(id) { _, pointer in + guard let pointer else { return } + Unmanaged.fromOpaque(pointer).takeUnretainedValue().readyState = .open + }) + try RTCError.check(rtcSetClosedCallback(id) { _, pointer in + guard let pointer else { return } + Unmanaged.fromOpaque(pointer).takeUnretainedValue().readyState = .closed + }) + try RTCError.check(rtcSetMessageCallback(id) { _, bytes, size, pointer in + guard let bytes, let pointer else { return } + if 0 <= size { + let data = Data(bytes: bytes, count: Int(size)) + Unmanaged.fromOpaque(pointer).takeUnretainedValue().didReceiveMessage(data) + } + }) + try RTCError.check(rtcSetErrorCallback(id) { _, error, pointer in + guard let error, let pointer else { return } + Unmanaged.fromOpaque(pointer).takeUnretainedValue().errorOccurred(String(cString: error)) + }) + } catch { + rtcDeleteTrack(id) + throw error + } + } + + deinit { + rtcDeleteTrack(id) + } + + func send(_ buffer: CMSampleBuffer) { + packetizer?.append(buffer) { packet in + try? send(packet.data) + } + } + + func send(_ buffer: AVAudioCompressedBuffer, when: AVAudioTime) { + packetizer?.append(buffer, when: when) { packet in + try? send(packet.data) + } + } + + func didReceiveMessage(_ message: Data) { + do { + let packet = try RTPPacket(message) + packetizer?.append(packet) + } catch { + logger.warn(error) + } + } + + private func errorOccurred(_ error: String) { + logger.warn(error) + } + + private func makePacketizer() throws -> (any RTPPacketizer)? { + let description = try SDPMediaDescription(sdp: description) + var result: (any RTPPacketizer)? + let rtpmap = description.attributes.compactMap { attr -> (UInt8, String, Int, Int?)? 
in + if case let .rtpmap(payload, codec, clock, channel) = attr { return (payload, codec, clock, channel) } + return nil + } + guard !rtpmap.isEmpty else { + return nil + } + switch rtpmap[0].1.lowercased() { + case "opus": + let packetizer = RTPOpusPacketizer(ssrc: ssrc, payloadType: description.payload) + packetizer.delegate = self + result = packetizer + case "h264": + let packetizer = RTPH264Packetizer(ssrc: ssrc, payloadType: description.payload) + packetizer.delegate = self + result = packetizer + default: + break + } + for attribute in description.attributes { + switch attribute { + case .fmtp(_, let params): + result?.formatParameter = RTPFormatParameter(params) + default: + break + } + } + return result + } +} + +extension RTCTrack: RTPPacketizerDelegate { + // MARK: RTPPacketizerDelegate + func packetizer(_ packetizer: some RTPPacketizer, didOutput buffer: CMSampleBuffer) { + delegate?.track(self, didOutput: buffer) + } + + func packetizer(_ packetizer: some RTPPacketizer, didOutput buffer: AVAudioCompressedBuffer, when: AVAudioTime) { + delegate?.track(self, didOutput: buffer, when: when) + } +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCTrackConfiguration.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCTrackConfiguration.swift new file mode 100644 index 000000000..8c356e0b6 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCTrackConfiguration.swift @@ -0,0 +1,73 @@ +import Foundation +import HaishinKit +import libdatachannel + +struct RTCTrackConfiguration: Sendable { + private static func generateSSRC() -> UInt32 { + var ssrc: UInt32 = 0 + repeat { + ssrc = UInt32.random(in: 1...UInt32.max) + } while ssrc == 0 + return ssrc + } + + private static func generateCName() -> String { + return String(UUID().uuidString.replacingOccurrences(of: "-", with: "").prefix(16)) + } + + let codec: rtcCodec + let ssrc: UInt32 + let pt: Int32 + let mid: String + let name: String + let msid: String + let trackId: 
String + let profile: String? +} + +extension RTCTrackConfiguration { + init(mid: String, streamId: String, audioCodecSettings: AudioCodecSettings) { + self.codec = audioCodecSettings.format.cValue ?? RTC_CODEC_OPUS + self.ssrc = Self.generateSSRC() + self.pt = 111 + self.mid = mid + self.name = Self.generateCName() + self.msid = streamId + self.trackId = UUID().uuidString + self.profile = "minptime=10;useinbandfec=1;stereo=1;sprop-stereo=1" + } + + init(mid: String, streamId: String, videoCodecSettings: VideoCodecSettings) { + self.codec = videoCodecSettings.format.cValue + self.ssrc = Self.generateSSRC() + self.pt = 98 + self.mid = mid + self.name = Self.generateCName() + self.msid = streamId + self.trackId = UUID().uuidString + self.profile = nil + } +} + +extension RTCTrackConfiguration { + func addTrack(_ connection: Int32, direction: RTCDirection) throws -> Int32 { + var rtcTrackInit = makeRtcTrackInit(direction) + let result = try RTCError.check(rtcAddTrackEx(connection, &rtcTrackInit)) + return result + } + + private func makeRtcTrackInit(_ direction: RTCDirection) -> rtcTrackInit { + // TODO: Fix memory leak + return rtcTrackInit( + direction: direction.cValue, + codec: codec, + payloadType: pt, + ssrc: ssrc, + mid: strdup(mid), + name: strdup(name), + msid: strdup(msid), + trackId: strdup(trackId), + profile: profile == nil ? 
nil : strdup(profile) + ) + } +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCTransportPolicy.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCTransportPolicy.swift new file mode 100644 index 000000000..3cfbbddc6 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTC/RTCTransportPolicy.swift @@ -0,0 +1,17 @@ +import libdatachannel + +public enum RTCTransportPolicy: Sendable, Encodable { + case all + case relay +} + +extension RTCTransportPolicy { + var cValue: rtcTransportPolicy { + switch self { + case .all: + return RTC_TRANSPORT_POLICY_ALL + case .relay: + return RTC_TRANSPORT_POLICY_RELAY + } + } +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPFormatParameter.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPFormatParameter.swift new file mode 100644 index 000000000..22514e53b --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPFormatParameter.swift @@ -0,0 +1,40 @@ +import Foundation + +@dynamicMemberLookup +struct RTPFormatParameter: Sendable { + static let empty = RTPFormatParameter() + + private let data: [String: String] + + subscript(dynamicMember key: String) -> Int? 
{ + guard let value = data[key] else { + return nil + } + return Int(value) + } + + subscript(dynamicMember key: String) -> Bool { + guard let value = data[key] else { + return false + } + return value == "1" || value == "true" + } +} + +extension RTPFormatParameter { + init() { + self.data = [:] + } + + init(_ value: String) { + var data: [String: String] = [:] + let pairs = value.split(separator: ";") + for pair in pairs { + let parts = pair.split(separator: "=", maxSplits: 1).map { $0.trimmingCharacters(in: .whitespaces) } + if parts.count == 2 { + data[parts[0]] = parts[1] + } + } + self.data = data + } +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPH264Packetizer.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPH264Packetizer.swift new file mode 100644 index 000000000..4dd428955 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPH264Packetizer.swift @@ -0,0 +1,317 @@ +import AVFAudio +import CoreMedia +import Foundation +import HaishinKit + +private let RTPH264Packetizer_startCode = Data([0x00, 0x00, 0x00, 0x01]) + +/// https://datatracker.ietf.org/doc/html/rfc3984 +final class RTPH264Packetizer: RTPPacketizer { + let ssrc: UInt32 + let payloadType: UInt8 + weak var delegate: T? + var formatParameter = RTPFormatParameter() + private var sequenceNumber: UInt16 = 0 + private var buffer = Data() + private var nalUnitReader = NALUnitReader() + private var pictureParameterSets: Data? + private var sequenceParameterSets: Data? + private var formatDescription: CMFormatDescription? 
+ + // for FragmentUnitA + private var fragmentedBuffer = Data() + private var fragmentedStarted = false + private var fragmentedTimestamp: UInt32 = 0 + private var timestamp: RTPTimestamp = .init(90000) + + private lazy var jitterBuffer: RTPJitterBuffer = { + let jitterBuffer = RTPJitterBuffer() + jitterBuffer.delegate = self + return jitterBuffer + }() + + init(ssrc: UInt32, payloadType: UInt8) { + self.ssrc = ssrc + self.payloadType = payloadType + } + + func append(_ packet: RTPPacket) { + jitterBuffer.append(packet) + } + + func append(_ buffer: CMSampleBuffer, lambda: (RTPPacket) -> Void) { + let nals = nalUnitReader.read(buffer) + for i in 0.. Void) { + } + + private func decode(_ packet: RTPPacket) { + guard !packet.payload.isEmpty else { + return + } + let nalUnitType = packet.payload[0] & 0x1F + switch nalUnitType { + case 1...23: + decodeSingleNALUnit(packet) + case 24: + decodeStapA(packet) + case 28: + decodeFragmentUnitA(packet) + default: + logger.warn("undefined nal unit type = ", nalUnitType) + } + } + + /// STAP-A (Single-Time Aggregation Packet) + /// - SeeAlso: RFC 3984 section 5.7.1 + private func decodeStapA(_ packet: RTPPacket) { + // payload[0] = STAP-A indicator (F | NRI | Type=24) + // then: [NALU-size:16][NALU-data] repeating + let payload = packet.payload + guard payload.count >= 1 + 2 else { + return + } + + var offset = 1 + var nalUnits: [Data] = [] + nalUnits.reserveCapacity(4) + + while offset + 2 <= payload.count { + let size = (UInt16(payload[offset]) << 8) | UInt16(payload[offset + 1]) + offset += 2 + guard size > 0 else { + logger.warn("decodeStapA(_:) > invalid nalu size = 0") + break + } + let end = offset + Int(size) + guard end <= payload.count else { + logger.warn("decodeStapA(_:) > bufferUnderrun") + break + } + nalUnits.append(Data(payload[offset.. CMSampleBuffer? 
{ + guard formatDescription != nil else { + return nil + } + let presentationTimeStamp: CMTime = self.timestamp.convert(timestamp) + let units = nalUnitReader.read(&buffer, type: H264NALUnit.self) + var blockBuffer: CMBlockBuffer? + ISOTypeBufferUtil.toNALFileFormat(&buffer) + blockBuffer = buffer.makeBlockBuffer() + var sampleSizes: [Int] = [] + var sampleBuffer: CMSampleBuffer? + var timing = CMSampleTimingInfo( + duration: .invalid, + presentationTimeStamp: presentationTimeStamp, + decodeTimeStamp: .invalid + ) + sampleSizes.append(buffer.count) + guard let blockBuffer, CMSampleBufferCreate( + allocator: kCFAllocatorDefault, + dataBuffer: blockBuffer, + dataReady: true, + makeDataReadyCallback: nil, + refcon: nil, + formatDescription: formatDescription, + sampleCount: sampleSizes.count, + sampleTimingEntryCount: 1, + sampleTimingArray: &timing, + sampleSizeEntryCount: sampleSizes.count, + sampleSizeArray: &sampleSizes, + sampleBufferOut: &sampleBuffer) == noErr else { + return nil + } + sampleBuffer?.isNotSync = !units.contains { $0.type == .idr } + return sampleBuffer + } + + private func makeFormatDescription() -> CMFormatDescription? { + guard let pictureParameterSets, let sequenceParameterSets else { + return nil + } + let pictureParameterSetArray = [pictureParameterSets.bytes] + let sequenceParameterSetArray = [sequenceParameterSets.bytes] + return pictureParameterSetArray[0].withUnsafeBytes { (ppsBuffer: UnsafeRawBufferPointer) -> CMFormatDescription? in + guard let ppsBaseAddress = ppsBuffer.baseAddress else { + return nil + } + return sequenceParameterSetArray[0].withUnsafeBytes { (spsBuffer: UnsafeRawBufferPointer) -> CMFormatDescription? 
in + guard let spsBaseAddress = spsBuffer.baseAddress else { + return nil + } + let pointers: [UnsafePointer] = [ + spsBaseAddress.assumingMemoryBound(to: UInt8.self), + ppsBaseAddress.assumingMemoryBound(to: UInt8.self) + ] + let sizes: [Int] = [spsBuffer.count, ppsBuffer.count] + let nalUnitHeaderLength: Int32 = 4 + var formatDescriptionOut: CMFormatDescription? + CMVideoFormatDescriptionCreateFromH264ParameterSets( + allocator: kCFAllocatorDefault, + parameterSetCount: pointers.count, + parameterSetPointers: pointers, + parameterSetSizes: sizes, + nalUnitHeaderLength: nalUnitHeaderLength, + formatDescriptionOut: &formatDescriptionOut + ) + return formatDescriptionOut + } + } + } +} + +extension RTPH264Packetizer: RTPJitterBufferDelegate { + // MARK: RTPJitterBufferDelegate + func jitterBuffer(_ buffer: RTPJitterBuffer, sequenced: RTPPacket) { + decode(sequenced) + } +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPJitterBuffer.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPJitterBuffer.swift new file mode 100644 index 000000000..aee2d471b --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPJitterBuffer.swift @@ -0,0 +1,27 @@ +import Foundation + +protocol RTPJitterBufferDelegate: AnyObject { + func jitterBuffer(_ buffer: RTPJitterBuffer, sequenced: RTPPacket) +} + +final class RTPJitterBuffer { + weak var delegate: T? 
+ + private var buffer: [UInt16: RTPPacket] = [:] + private var expectedSequence: UInt16 = 0 + private let stalePacketCounts: Int = 4 + + func append(_ packet: RTPPacket) { + buffer[packet.sequenceNumber] = packet + + while let packet = buffer[expectedSequence] { + delegate?.jitterBuffer(self, sequenced: packet) + buffer.removeValue(forKey: expectedSequence) + expectedSequence &+= 1 + } + + if stalePacketCounts <= buffer.count { + expectedSequence &+= 1 + } + } +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPOpusPacketizer.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPOpusPacketizer.swift new file mode 100644 index 000000000..1ec6c4bff --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPOpusPacketizer.swift @@ -0,0 +1,101 @@ +import AVFAudio +import CoreMedia +import Foundation +import HaishinKit + +private let kRTPOpusPacketizer_sampleRate = 48000.0 + +final class RTPOpusPacketizer: RTPPacketizer { + let ssrc: UInt32 + let payloadType: UInt8 + weak var delegate: T? + var formatParameter = RTPFormatParameter() + private var timestamp: RTPTimestamp = .init(kRTPOpusPacketizer_sampleRate) + private var audioFormat: AVAudioFormat? 
+ private var sequenceNumber: UInt16 = 0 + private lazy var jitterBuffer: RTPJitterBuffer = { + let jitterBuffer = RTPJitterBuffer() + jitterBuffer.delegate = self + return jitterBuffer + }() + + init(ssrc: UInt32, payloadType: UInt8) { + self.ssrc = ssrc + self.payloadType = payloadType + } + + func append(_ packet: RTPPacket) { + jitterBuffer.append(packet) + } + + func append(_ buffer: CMSampleBuffer, lambda: (RTPPacket) -> Void) { + } + + func append(_ buffer: AVAudioCompressedBuffer, when: AVAudioTime, lambda: (RTPPacket) -> Void) { + lambda(RTPPacket( + version: RTPPacket.version, + padding: false, + extension: false, + cc: 0, + marker: true, + payloadType: payloadType, + sequenceNumber: sequenceNumber, + timestamp: timestamp.convert(when), + ssrc: ssrc, + payload: Data( + bytes: buffer.data.assumingMemoryBound(to: UInt8.self), + count: Int(buffer.byteLength) + ) + )) + sequenceNumber &+= 1 + } + + private func decode(_ packet: RTPPacket) { + if audioFormat == nil { + if let formatDescription = makeFormatDescription() { + audioFormat = .init(cmAudioFormatDescription: formatDescription) + } + } + if let audioFormat { + let buffer = AVAudioCompressedBuffer(format: audioFormat, packetCapacity: 1, maximumPacketSize: packet.payload.count) + packet.copyBytes(to: buffer) + delegate?.packetizer(self, didOutput: buffer, when: timestamp.convert(packet.timestamp)) + } + } + + package func makeFormatDescription() -> CMFormatDescription? { + var formatDescription: CMAudioFormatDescription? + let framesPerPacket = AVAudioFrameCount(kRTPOpusPacketizer_sampleRate * 0.02) + var audioStreamBasicDescription = AudioStreamBasicDescription( + mSampleRate: kRTPOpusPacketizer_sampleRate, + mFormatID: kAudioFormatOpus, + mFormatFlags: 0, + mBytesPerPacket: 0, + mFramesPerPacket: framesPerPacket, + mBytesPerFrame: 0, + mChannelsPerFrame: formatParameter.stereo == true ? 
2 : 1, + mBitsPerChannel: 0, + mReserved: 0 + ) + guard CMAudioFormatDescriptionCreate( + allocator: kCFAllocatorDefault, + asbd: &audioStreamBasicDescription, + layoutSize: 0, + layout: nil, + magicCookieSize: 0, + magicCookie: nil, + extensions: nil, + formatDescriptionOut: &formatDescription + ) == noErr else { + return nil + } + return formatDescription + } +} + +extension RTPOpusPacketizer: RTPJitterBufferDelegate { + // MARK: RTPJitterBufferDelegate + func jitterBuffer(_ buffer: RTPJitterBuffer>, sequenced: RTPPacket) { + decode(sequenced) + } +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPPacket.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPPacket.swift new file mode 100644 index 000000000..060a3508b --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPPacket.swift @@ -0,0 +1,90 @@ +import AVFAudio +import Foundation + +/// https://datatracker.ietf.org/doc/html/rfc3550 +struct RTPPacket: Sendable { + static let version: UInt8 = 2 + static let headerSize: Int = 12 + + enum Error: Swift.Error { + case bufferUnderrun + } + + let version: UInt8 + let padding: Bool + let `extension`: Bool + let cc: UInt8 + let marker: Bool + let payloadType: UInt8 + let sequenceNumber: UInt16 + let timestamp: UInt32 + let ssrc: UInt32 + let payload: Data + + func copyBytes(to buffer: AVAudioCompressedBuffer) { + let byteLength = UInt32(payload.count) + buffer.packetDescriptions?.pointee = AudioStreamPacketDescription( + mStartOffset: 0, + mVariableFramesInPacket: 0, + mDataByteSize: byteLength + ) + buffer.packetCount = 1 + buffer.byteLength = byteLength + payload.withUnsafeBytes { pointer in + guard let baseAddress = pointer.baseAddress else { + return + } + buffer.data.copyMemory(from: baseAddress, byteCount: payload.count) + } + } +} + +extension RTPPacket { + var data: Data { + var data = Data() + var first: UInt8 = (version & 0x03) << 6 + first |= (padding ? 1 : 0) << 5 + first |= (`extension` ? 
1 : 0) << 4 + first |= cc & 0x0F + data.append(first) + var second: UInt8 = (marker ? 1 : 0) << 7 + second |= payloadType & 0x7F + data.append(second) + data.append(contentsOf: [ + UInt8(sequenceNumber >> 8), + UInt8(sequenceNumber & 0xFF) + ]) + data.append(contentsOf: [ + UInt8(timestamp >> 24), + UInt8((timestamp >> 16) & 0xFF), + UInt8((timestamp >> 8) & 0xFF), + UInt8(timestamp & 0xFF) + ]) + data.append(contentsOf: [ + UInt8(ssrc >> 24), + UInt8((ssrc >> 16) & 0xFF), + UInt8((ssrc >> 8) & 0xFF), + UInt8(ssrc & 0xFF) + ]) + data.append(payload) + return data + } + + init(_ data: Data) throws { + guard RTPPacket.headerSize < data.count else { + throw Error.bufferUnderrun + } + let first = data[0] + version = (first & 0b11000000) >> 6 + padding = (first & 0b00100000) >> 5 == 1 + `extension` = (first & 0b00010000) >> 4 == 1 + cc = (first & 0b00001111) + let second = data[1] + marker = (second & 0b10000000) >> 7 == 1 + payloadType = (second & 0b01111111) + sequenceNumber = UInt16(data[2]) << 8 | UInt16(data[3]) + timestamp = UInt32(data: data[4...7]).bigEndian + ssrc = UInt32(data: data[8...11]).bigEndian + payload = Data(data[12...]) + } +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPPacketizer.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPPacketizer.swift new file mode 100644 index 000000000..fdb331db3 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPPacketizer.swift @@ -0,0 +1,21 @@ +import AVFAudio +import CoreMedia + +protocol RTPPacketizerDelegate: AnyObject { + func packetizer(_ packetizer: some RTPPacketizer, didOutput buffer: CMSampleBuffer) + func packetizer(_ packetizer: some RTPPacketizer, didOutput buffer: AVAudioCompressedBuffer, when: AVAudioTime) +} + +protocol RTPPacketizer { + associatedtype T: RTPPacketizerDelegate + + var delegate: T? 
{ get set } + var ssrc: UInt32 { get } + var formatParameter: RTPFormatParameter { get set } + + func append(_ packet: RTPPacket) + + func append(_ buffer: CMSampleBuffer, lambda: (RTPPacket) -> Void) + + func append(_ buffer: AVAudioCompressedBuffer, when: AVAudioTime, lambda: (RTPPacket) -> Void) +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPTimestamp.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPTimestamp.swift new file mode 100644 index 000000000..94cc6b6a3 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/RTP/RTPTimestamp.swift @@ -0,0 +1,49 @@ +import AVFAudio +import CoreMedia +import Foundation + +struct RTPTimestamp { + static let startedAt: Double = -1 + + private let rate: Double + private var startedAt = Self.startedAt + + init(_ rate: Double) { + self.rate = rate + } + + func convert(_ timestamp: UInt32) -> AVAudioTime { + return AVAudioTime(hostTime: AVAudioTime.hostTime(forSeconds: Double(timestamp) / rate)) + } + + func convert(_ timestamp: UInt32) -> CMTime { + return CMTime(value: CMTimeValue(timestamp), timescale: CMTimeScale(rate)) + } + + mutating func convert(_ when: AVAudioTime) -> UInt32 { + let seconds: Double + if when.hostTime != 0 { + seconds = AVAudioTime.seconds(forHostTime: when.hostTime) + } else { + seconds = Double(when.sampleTime) / when.sampleRate + } + if startedAt == Self.startedAt { + startedAt = seconds + } + let timestamp = UInt64((seconds - startedAt) * rate) + return UInt32(timestamp & 0xFFFFFFFF) + } + + mutating func convert(_ time: CMTime) -> UInt32 { + let seconds = time.seconds + if startedAt == Self.startedAt { + startedAt = seconds + } + let timestamp = UInt64((seconds - startedAt) * rate) + return UInt32(timestamp & 0xFFFFFFFF) + } + + mutating func reset() { + startedAt = Self.startedAt + } +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/SDP/SDPMediaDescription.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/SDP/SDPMediaDescription.swift new 
file mode 100644 index 000000000..4cfdd6059 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/SDP/SDPMediaDescription.swift @@ -0,0 +1,97 @@ +import Foundation + +struct SDPMediaDescription { + enum Error: Swift.Error { + case invalidArguments(_ sdp: String) + } + + static let m = "m=" + static let mid = "a=mid:" + static let fmtp = "a=fmtp:" + static let rtpmap = "a=rtpmap:" + static let rtcpFb = "a=rtcp-fb:" + + enum Attribute { + case rtpmap(payload: UInt8, codec: String, clock: Int, channels: Int?) + case fmtp(payload: UInt8, params: String) + case rtcpFb(payload: UInt8, type: String) + case mid(String) + case direction(String) + case rtcpMux + case other(key: String, value: String?) + } + + let kind: String + let payload: UInt8 + let attributes: [Attribute] +} + +extension SDPMediaDescription { + init(sdp: String) throws { + var kind: String? + var payload: UInt8? + var attributes: [Attribute] = [] + let lines = sdp.replacingOccurrences(of: "\r\n", with: "\n").split(separator: "\n") + for line in lines { + switch true { + case line.hasPrefix(Self.m): + // m=audio 9 UDP/TLS/RTP/SAVPF 111 + let components = line.dropFirst(Self.m.count).split(separator: " ") + guard 4 <= components.count else { + break + } + kind = String(components[0]) + if let _payload = UInt8(components[3]) { + payload = _payload + } + case line.hasPrefix(Self.mid): + // a=mid:0 + attributes.append(.mid(String(line.dropFirst(Self.mid.count)))) + case line.hasPrefix(Self.rtpmap): + // a=rtpmap:111 opus/48000/2 + let components = line.dropFirst(Self.rtpmap.count).split(separator: " ") + guard 2 <= components.count else { + break + } + let codec = components[1].split(separator: "/") + guard 2 <= codec.count else { + break + } + if let payload = UInt8(components[0]), let clock = Int(codec[1]) { + attributes.append(.rtpmap( + payload: payload, + codec: String(codec[0]), + clock: clock, + channels: 2 < codec.count ? 
Int(codec[2]) : nil + )) + } + case line.hasPrefix(Self.rtcpFb): + // a=rtcp-fb:96 nack + let components = line.dropFirst(Self.rtcpFb.count).split(separator: " ") + guard 2 <= components.count else { + break + } + if let payload = UInt8(components[0]) { + attributes.append(.rtcpFb(payload: payload, type: String(components[1]))) + } + case line.hasPrefix(Self.fmtp): + // a=fmtp:111 minptime=10;useinbandfec=1 + let components = line.dropFirst(Self.fmtp.count).split(separator: " ") + guard 2 <= components.count else { + break + } + if let payload = UInt8(components[0]) { + attributes.append(.fmtp(payload: payload, params: String(components[1]))) + } + default: + break + } + } + guard let kind, let payload else { + throw Error.invalidArguments(sdp) + } + self.kind = kind + self.payload = payload + self.attributes = attributes + } +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/SDP/SDPSessionDescriptionType.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/SDP/SDPSessionDescriptionType.swift new file mode 100644 index 000000000..8f94412ed --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/SDP/SDPSessionDescriptionType.swift @@ -0,0 +1,8 @@ +import libdatachannel + +public enum SDPSessionDescriptionType: String, Sendable { + case answer + case offer + case pranswer + case rollback +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Util/CUtil.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Util/CUtil.swift new file mode 100644 index 000000000..d195879c5 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Util/CUtil.swift @@ -0,0 +1,21 @@ +import Foundation + +enum CUtil { + static func getString( + _ lambda: (UnsafeMutablePointer?, Int32) -> Int32 + ) throws -> String { + let size = try RTCError.check(lambda(nil, 0)) + var buffer = [CChar](repeating: 0, count: Int(size)) + _ = lambda(&buffer, Int32(size)) + return String(cString: &buffer) + } + + static func getUInt32( + _ lambda: (UnsafeMutablePointer?, 
Int32) -> Int32 + ) throws -> UInt32 { + let size = try RTCError.check(lambda(nil, 0)) + var buffer: UInt32 = 0 + _ = lambda(&buffer, Int32(size)) + return buffer + } +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Util/Constants.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Util/Constants.swift new file mode 100644 index 000000000..618655c75 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Sources/Util/Constants.swift @@ -0,0 +1,6 @@ +import Logboard + +/// The identifier for the HaishinKit WebRTC integration. +public let kRTCHaishinKitIdentifier = "com.haishinkit.RTCHaishinKit" + +nonisolated(unsafe) let logger = LBLogger.with(kRTCHaishinKitIdentifier) diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/RTP/RTPFormatParameterTests.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/RTP/RTPFormatParameterTests.swift new file mode 100644 index 000000000..5bee437be --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/RTP/RTPFormatParameterTests.swift @@ -0,0 +1,13 @@ +import AVFoundation +import Foundation +import Testing + +@testable import RTCHaishinKit + +@Suite struct RTPFormatParamterTests { + @Test func opus() throws { + let parameter = RTPFormatParameter("minptime=10;useinbandfec=1;stereo=1") + #expect(parameter.stereo == true) + #expect(parameter.minptime == 10) + } +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/RTP/RTPJitterBufferTests.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/RTP/RTPJitterBufferTests.swift new file mode 100644 index 000000000..8d7d12568 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/RTP/RTPJitterBufferTests.swift @@ -0,0 +1,32 @@ +import AVFoundation +import Foundation +import Testing + +@testable import RTCHaishinKit + +@Suite struct RTPJitterBufferTests { + final class Result: RTPJitterBufferDelegate { + var count = 0 + + func jitterBuffer(_ buffer: RTPJitterBuffer, sequenced: RTPPacket) { + count += 1 + } + } + + @Test func lostPacket() throws { + let 
result = Result() + let buffer = RTPJitterBuffer() + buffer.delegate = result + var packets: [RTPPacket] = [] + for i in 0...100 { + packets.append(.init(version: 2, padding: false, extension: false, cc: 0, marker: false, payloadType: 0, sequenceNumber: UInt16(i), timestamp: UInt32(960 * (i + 1)), ssrc: 0, payload: Data())) + } + packets.remove(at: 30) + packets.remove(at: 50) + for packet in packets { + buffer.append(packet) + } + + #expect(result.count == 99) + } +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/RTP/RTPPacketTests.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/RTP/RTPPacketTests.swift new file mode 100644 index 000000000..954ebc390 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/RTP/RTPPacketTests.swift @@ -0,0 +1,30 @@ +import AVFoundation +import Foundation +import Testing + +@testable import RTCHaishinKit + +@Suite struct RTPPacketTests { + @Test func packet1() throws { + let data = Data([128, 226, 2, 7, 0, 1, 201, 8, 14, 44, 247, 214, 28, 76, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 128]) + let packet = try RTPPacket(data) + #expect(packet.version == 2) + #expect(packet.padding == false) + #expect(packet.marker == true) + #expect(packet.payloadType == 98) + #expect(packet.sequenceNumber == 519) + #expect(packet.timestamp == 117000) + #expect(packet.ssrc == 237828054) + #expect(packet.data == data) + } + + @Test func packet2() throws { + let data = Data([128, 111, 0, 173, 0, 2, 136, 192, 41, 147, 97, 224, 252, 23, 218, 183, 83, 181, 164, 207, 10, 78, 74, 42, 42, 249, 40, 72, 142, 88, 51, 132, 23, 107, 145, 143, 6, 242, 109, 235, 187, 177, 55, 195, 232, 243, 46, 157, 1, 21, 214, 170, 16, 197, 227, 115, 186, 183, 132, 229, 107, 63, 238, 227, 166, 27, 77, 128, 120, 49, 249, 174, 241, 250, 236, 250, 154, 140, 253, 68, 152, 242, 187, 1, 196, 52, 198, 130, 62, 235, 20, 196, 1, 223, 126, 158, 142, 138, 35, 215, 22, 252, 235, 69, 166, 241, 237, 13, 155, 16, 6, 61, 26, 138, 90, 207, 213, 22, 33, 198, 
209, 110, 198, 118, 174, 232, 21, 6, 206, 237, 190, 47, 214, 61, 161, 168, 192, 17, 248, 87, 21, 172, 79, 90, 183, 66, 221, 232, 206, 153, 205, 57, 195, 106, 119, 12, 130, 190, 105, 234, 116, 78, 72, 190, 85, 189, 149, 73, 150, 139, 147, 230, 71, 149, 39, 87, 207, 245, 247, 226, 176, 246, 14, 220, 3, 158, 81, 129, 96, 13, 52, 126, 49, 139, 179, 176, 108, 0, 220, 77, 40, 5, 201, 219, 218, 86, 76, 241, 204, 152, 209, 215, 241, 18, 247, 151, 206, 20, 110, 188, 245, 89, 25, 254, 206, 87, 76, 210, 51, 55, 117, 127, 177, 149, 13, 23, 226, 214, 24, 122, 205, 225, 42, 66, 172, 10, 16, 129, 222, 50, 253, 125, 178, 46, 221, 91, 181, 171, 83, 85, 164, 240, 245, 80, 240]) + let packet = try RTPPacket(data) + #expect(packet.version == 2) + #expect(packet.padding == false) + #expect(packet.payloadType == 111) + #expect(packet.sequenceNumber == 173) + #expect(packet.data == data) + } +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/RTP/RTPTimestampTests.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/RTP/RTPTimestampTests.swift new file mode 100644 index 000000000..4d8c037aa --- /dev/null +++ b/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/RTP/RTPTimestampTests.swift @@ -0,0 +1,14 @@ +import AVFoundation +import Foundation +import Testing + +@testable import RTCHaishinKit + +@Suite struct RTPTimestampTests { + @Test func convertRTPPacketTimestamp_H264() throws { + var timestamp = RTPTimestamp(90000.0) + #expect(timestamp.convert(CMTime(value: 511364443358833, timescale: 1000000000)) == 0) + #expect(timestamp.convert(CMTime(value: 511364476594833, timescale: 1000000000)) == 2991) + #expect(timestamp.convert(CMTime(value: 511364509930833, timescale: 1000000000)) == 5991) + } +} diff --git a/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/SDP/SDPMediaDescriptionTests.swift b/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/SDP/SDPMediaDescriptionTests.swift new file mode 100644 index 000000000..a5e8ce69c --- /dev/null +++ 
b/Vendor/HaishinKit.swift/RTCHaishinKit/Tests/SDP/SDPMediaDescriptionTests.swift @@ -0,0 +1,65 @@ +import AVFoundation +import Foundation +import Testing + +@testable import RTCHaishinKit + +@Suite struct SDPMediaDescriptionTests { + @Test func opus() throws { + let sdp = """ + m=audio 9 UDP/TLS/RTP/SAVPF 111 + c=IN IP4 0.0.0.0 + a=rtpmap:111 opus/48000/2 + a=fmtp:111 minptime=10;useinbandfec=1 + a=rtcp-mux + a=rtcp-rsize + a=sendrecv + a=mid:0 + """ + + let mediaDescription = try SDPMediaDescription(sdp: sdp) + #expect(mediaDescription.kind == "audio") + #expect(mediaDescription.payload == 111) + for attributes in mediaDescription.attributes { + switch attributes { + case .rtpmap(let payload, let codec, let clock, let channels): + #expect(payload == 111) + #expect(codec == "opus") + #expect(clock == 48000) + #expect(channels == 2) + case .mid(let mid): + #expect(mid == "0") + default: + break + } + } + let rtpmap = mediaDescription.attributes.compactMap { attr -> (UInt8, String, Int, Int?)? 
in + if case let .rtpmap(payload, codec, clock, channel) = attr { return (payload, codec, clock, channel) } + return nil + } + #expect(rtpmap[0].0 == 111) + #expect(rtpmap[0].1 == "opus") + #expect(rtpmap[0].2 == 48000) + #expect(rtpmap[0].3 == 2) + } + + @Test func vp8() throws { + let sdp = """ + m=video 9 UDP/TLS/RTP/SAVPF 96 + c=IN IP4 0.0.0.0 + a=rtpmap:96 VP8/90000 + a=rtcp-fb:96 ccm fir + a=rtcp-fb:96 nack + a=rtcp-fb:96 nack pli + a=rtcp-fb:96 goog-remb + a=rtcp-fb:96 transport-cc + a=rtcp-mux + a=rtcp-rsize + a=sendrecv + a=mid:1 + """ + let mediaDescription = try SDPMediaDescription(sdp: sdp) + #expect(mediaDescription.kind == "video") + #expect(mediaDescription.payload == 96) + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/AMF/AMF0Serializer.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/AMF/AMF0Serializer.swift new file mode 100644 index 000000000..f9a4c5053 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/AMF/AMF0Serializer.swift @@ -0,0 +1,399 @@ +import Foundation +import HaishinKit + +enum AMFSerializerError: Error { + case deserialize + case outOfIndex +} + +// MARK: - +protocol AMFSerializer: ByteArrayConvertible { + var reference: AMFReference { get set } + + @discardableResult + func serialize(_ value: Bool) -> Self + func deserialize() throws -> Bool + + @discardableResult + func serialize(_ value: String) -> Self + func deserialize() throws -> String + + @discardableResult + func serialize(_ value: Int) -> Self + func deserialize() throws -> Int + + @discardableResult + func serialize(_ value: Double) -> Self + func deserialize() throws -> Double + + @discardableResult + func serialize(_ value: Date) -> Self + func deserialize() throws -> Date + + @discardableResult + func serialize(_ value: [(any Sendable)?]) -> Self + func deserialize() throws -> [(any Sendable)?] 
+ + @discardableResult + func serialize(_ value: AMFArray) -> Self + func deserialize() throws -> AMFArray + + @discardableResult + func serialize(_ value: AMFObject) -> Self + func deserialize() throws -> AMFObject + + @discardableResult + func serialize(_ value: AMFXMLDocument) -> Self + func deserialize() throws -> AMFXMLDocument + + @discardableResult + func serialize(_ value: (any Sendable)?) -> Self + func deserialize() throws -> (any Sendable)? +} + +enum AMF0Type: UInt8 { + case number = 0x00 + case bool = 0x01 + case string = 0x02 + case object = 0x03 + // case MovieClip = 0x04 + case null = 0x05 + case undefined = 0x06 + case reference = 0x07 + case ecmaArray = 0x08 + case objectEnd = 0x09 + case strictArray = 0x0a + case date = 0x0b + case longString = 0x0c + case unsupported = 0x0d + // case RecordSet = 0x0e + case xmlDocument = 0x0f + case typedObject = 0x10 + case avmplush = 0x11 +} + +// MARK: - AMF0Serializer +final class AMF0Serializer: ByteArray { + var reference = AMFReference() +} + +extension AMF0Serializer: AMFSerializer { + // MARK: AMFSerializer + @discardableResult + func serialize(_ value: (any Sendable)?) 
-> Self { + if value == nil { + return writeUInt8(AMF0Type.null.rawValue) + } + switch value { + case let value as Int: + return serialize(Double(value)) + case let value as UInt: + return serialize(Double(value)) + case let value as Int8: + return serialize(Double(value)) + case let value as UInt8: + return serialize(Double(value)) + case let value as Int16: + return serialize(Double(value)) + case let value as UInt16: + return serialize(Double(value)) + case let value as Int32: + return serialize(Double(value)) + case let value as UInt32: + return serialize(Double(value)) + case let value as Float: + return serialize(Double(value)) + case let value as CGFloat: + return serialize(Double(value)) + case let value as Double: + return serialize(Double(value)) + case let value as Date: + return serialize(value) + case let value as String: + return serialize(value) + case let value as Bool: + return serialize(value) + case let value as [(any Sendable)?]: + return serialize(value) + case let value as AMFArray: + return serialize(value) + case let value as AMFObject: + return serialize(value) + default: + return writeUInt8(AMF0Type.undefined.rawValue) + } + } + + func deserialize() throws -> (any Sendable)? { + guard let type = AMF0Type(rawValue: try readUInt8()) else { + return nil + } + position -= 1 + switch type { + case .number: + return try deserialize() as Double + case .bool: + return try deserialize() as Bool + case .string: + return try deserialize() as String + case .object: + return try deserialize() as AMFObject + case .null: + position += 1 + return nil + case .undefined: + position += 1 + return kAMFUndefined + case .reference: + assertionFailure("TODO") + return nil + case .ecmaArray: + return try deserialize() as AMFArray + case .objectEnd: + assertionFailure() + return nil + case .strictArray: + return try deserialize() as [(any Sendable)?] 
+ case .date: + return try deserialize() as Date + case .longString: + return try deserialize() as String + case .unsupported: + assertionFailure("Unsupported") + return nil + case .xmlDocument: + return try deserialize() as AMFXMLDocument + case .typedObject: + return nil + case .avmplush: + assertionFailure("TODO") + return nil + } + } + + /** + * - seealso: 2.2 Number Type + */ + func serialize(_ value: Double) -> Self { + writeUInt8(AMF0Type.number.rawValue).writeDouble(value) + } + + func deserialize() throws -> Double { + guard try readUInt8() == AMF0Type.number.rawValue else { + throw AMFSerializerError.deserialize + } + return try readDouble() + } + + func serialize(_ value: Int) -> Self { + serialize(Double(value)) + } + + func deserialize() throws -> Int { + Int(try deserialize() as Double) + } + + /** + * - seealso: 2.3 Boolean Type + */ + func serialize(_ value: Bool) -> Self { + writeBytes(Data([AMF0Type.bool.rawValue, value ? 0x01 : 0x00])) + } + + func deserialize() throws -> Bool { + guard try readUInt8() == AMF0Type.bool.rawValue else { + throw AMFSerializerError.deserialize + } + return try readUInt8() == 0x01 ? true : false + } + + /** + * - seealso: 2.4 String Type + */ + func serialize(_ value: String) -> Self { + let isLong = UInt32(UInt16.max) < UInt32(value.count) + writeUInt8(isLong ? AMF0Type.longString.rawValue : AMF0Type.string.rawValue) + return serializeUTF8(value, isLong) + } + + func deserialize() throws -> String { + switch try readUInt8() { + case AMF0Type.string.rawValue: + return try deserializeUTF8(false) + case AMF0Type.longString.rawValue: + return try deserializeUTF8(true) + default: + assertionFailure() + return "" + } + } + + /** + * 2.5 Object Type + * typealias ECMAObject = [String, Any?] 
+ */ + func serialize(_ value: AMFObject) -> Self { + writeUInt8(AMF0Type.object.rawValue) + for (key, data) in value { + serializeUTF8(key, false).serialize(data) + } + return serializeUTF8("", false).writeUInt8(AMF0Type.objectEnd.rawValue) + } + + func deserialize() throws -> AMFObject { + var result = AMFObject() + + switch try readUInt8() { + case AMF0Type.null.rawValue: + return result + case AMF0Type.object.rawValue: + break + default: + throw AMFSerializerError.deserialize + } + + while true { + let key: String = try deserializeUTF8(false) + guard !key.isEmpty else { + position += 1 + break + } + result[key] = try deserialize() + } + + return result + } + + /** + * - seealso: 2.10 ECMA Array Type + */ + func serialize(_ value: AMFArray) -> Self { + writeUInt8(AMF0Type.ecmaArray.rawValue) + writeUInt32(UInt32(value.data.count)) + value.data.enumerated().forEach { index, value in + serializeUTF8(index.description, false).serialize(value) + } + value.dict.forEach { key, value in + serializeUTF8(key, false).serialize(value) + } + serializeUTF8("", false) + writeUInt8(AMF0Type.objectEnd.rawValue) + return self + } + + func deserialize() throws -> AMFArray { + switch try readUInt8() { + case AMF0Type.null.rawValue: + return AMFArray() + case AMF0Type.ecmaArray.rawValue: + break + default: + throw AMFSerializerError.deserialize + } + + var result = AMFArray(count: Int(try readUInt32())) + while true { + let key = try deserializeUTF8(false) + guard !key.isEmpty else { + position += 1 + break + } + result[key] = try deserialize() + } + + return result + } + + /** + * - seealso: 2.12 Strict Array Type + */ + func serialize(_ value: [(any Sendable)?]) -> Self { + writeUInt8(AMF0Type.strictArray.rawValue) + if value.isEmpty { + writeBytes(Data([0x00, 0x00, 0x00, 0x00])) + return self + } + writeUInt32(UInt32(value.count)) + for v in value { + serialize(v) + } + return self + } + + func deserialize() throws -> [(any Sendable)?] 
{ + guard try readUInt8() == AMF0Type.strictArray.rawValue else { + throw AMFSerializerError.deserialize + } + var result: [(any Sendable)?] = [] + let count = Int(try readUInt32()) + for _ in 0.. Self { + writeUInt8(AMF0Type.date.rawValue).writeDouble(value.timeIntervalSince1970 * 1000).writeBytes(Data([0x00, 0x00])) + } + + func deserialize() throws -> Date { + guard try readUInt8() == AMF0Type.date.rawValue else { + throw AMFSerializerError.deserialize + } + let date = Date(timeIntervalSince1970: try readDouble() / 1000) + position += 2 // timezone offset + return date + } + + /** + * - seealso: 2.17 XML Document Type + */ + func serialize(_ value: AMFXMLDocument) -> Self { + writeUInt8(AMF0Type.xmlDocument.rawValue).serializeUTF8(value.description, true) + } + + func deserialize() throws -> AMFXMLDocument { + guard try readUInt8() == AMF0Type.xmlDocument.rawValue else { + throw AMFSerializerError.deserialize + } + return AMFXMLDocument(data: try deserializeUTF8(true)) + } + + func deserialize() throws -> AMFTypedObject { + guard try readUInt8() == AMF0Type.typedObject.rawValue else { + throw AMFSerializerError.deserialize + } + + let typeName = try deserializeUTF8(false) + var result = AMFObject() + while true { + let key = try deserializeUTF8(false) + guard !key.isEmpty else { + position += 1 + break + } + result[key] = try deserialize() + } + + return AMFTypedObject(typeName: typeName, data: result) + } + + @discardableResult + private func serializeUTF8(_ value: String, _ isLong: Bool) -> Self { + let utf8 = Data(value.utf8) + if isLong { + writeUInt32(UInt32(utf8.count)) + } else { + writeUInt16(UInt16(utf8.count)) + } + return writeBytes(utf8) + } + + private func deserializeUTF8(_ isLong: Bool) throws -> String { + let length: Int = isLong ? 
Int(try readUInt32()) : Int(try readUInt16()) + return try readUTF8Bytes(length) + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/AMF/AMF3Serializer.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/AMF/AMF3Serializer.swift new file mode 100644 index 000000000..788f29eb4 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/AMF/AMF3Serializer.swift @@ -0,0 +1,574 @@ +import Foundation + +final class AMFReference { + var strings: [String] = [] + var objects: [Any] = [] + + func getString(_ index: Int) throws -> String { + if strings.count <= index { + throw AMFSerializerError.outOfIndex + } + return strings[index] + } + + func getObject(_ index: Int) throws -> Any { + if objects.count <= index { + throw AMFSerializerError.outOfIndex + } + return objects[index] + } + + func indexOf(_ value: T) -> Int? { + for (index, data) in objects.enumerated() { + if let data: T = data as? T, data == value { + return index + } + } + return nil + } + + func indexOf(_ value: [Int32]) -> Int? { + nil + } + + func indexOf(_ value: [UInt32]) -> Int? { + nil + } + + func indexOf(_ value: [Double]) -> Int? { + nil + } + + func indexOf(_ value: [Any?]) -> Int? { + nil + } + + func indexOf(_ value: AMFObject) -> Int? { + for (index, data) in objects.enumerated() { + if let data: AMFObject = data as? AMFObject, data.description == value.description { + return index + } + } + return nil + } + + func indexOf(_ value: String) -> Int? 
{ + strings.firstIndex(of: value) + } +} + +enum AMF3Type: UInt8 { + case undefined = 0x00 + case null = 0x01 + case boolFalse = 0x02 + case boolTrue = 0x03 + case integer = 0x04 + case number = 0x05 + case string = 0x06 + case xml = 0x07 + case date = 0x08 + case array = 0x09 + case object = 0x0A + case xmlString = 0x0B + case byteArray = 0x0C + case vectorInt = 0x0D + case vectorUInt = 0x0E + case vectorNumber = 0x0F + case vectorObject = 0x10 + case dictionary = 0x11 +} + +// MARK: - +/** + AMF3 Serializer + + - seealso: http://wwwimages.adobe.com/www.adobe.com/content/dam/Adobe/en/devnet/amf/pdf/amf-file-format-spec.pdf + */ +final class AMF3Serializer: ByteArray { + var reference = AMFReference() +} + +extension AMF3Serializer: AMFSerializer { + // MARK: AMFSerializer + @discardableResult + func serialize(_ value: (any Sendable)?) -> Self { + if value == nil { + return writeUInt8(AMF3Type.null.rawValue) + } + switch value { + case let value as Int: + return serialize(Double(value)) + case let value as UInt: + return serialize(Double(value)) + case let value as Int8: + return serialize(Double(value)) + case let value as UInt8: + return serialize(Double(value)) + case let value as Int16: + return serialize(Double(value)) + case let value as UInt16: + return serialize(Double(value)) + case let value as Int32: + return serialize(Double(value)) + case let value as UInt32: + return serialize(Double(value)) + case let value as Float: + return serialize(Double(value)) + case let value as CGFloat: + return serialize(Double(value)) + case let value as Double: + return serialize(Double(value)) + case let value as Date: + return serialize(value) + case let value as String: + return serialize(value) + case let value as Bool: + return serialize(value) + case let value as AMFArray: + return serialize(value) + case let value as AMFObject: + return serialize(value) + default: + return writeUInt8(AMF3Type.undefined.rawValue) + } + } + + func deserialize() throws -> (any 
Sendable)? { + guard let type = AMF3Type(rawValue: try readUInt8()) else { + throw AMFSerializerError.deserialize + } + position -= 1 + switch type { + case .undefined: + position += 1 + return kAMFUndefined + case .null: + position += 1 + return nil + case .boolFalse: + return try deserialize() as Bool + case .boolTrue: + return try deserialize() as Bool + case .integer: + return try deserialize() as Int + case .number: + return try deserialize() as Double + case .string: + return try deserialize() as String + case .xml: + return try deserialize() as AMFXMLDocument + case .date: + return try deserialize() as Date + case .array: + return try deserialize() as AMFArray + case .object: + return try deserialize() as AMFObject + case .xmlString: + return try deserialize() as AMFXML + case .byteArray: + return try deserialize() as Data + case .vectorInt: + return try deserialize() as [Int32] + case .vectorUInt: + return try deserialize() as [UInt32] + case .vectorNumber: + return try deserialize() as [Double] + case .vectorObject: + return try deserialize() as [(any Sendable)?] + case .dictionary: + assertionFailure("Unsupported") + return nil + } + } + + /** + - seealso: 3.4 false Type + - seealso: 3.5 true type + */ + @discardableResult + func serialize(_ value: Bool) -> Self { + writeUInt8(value ? 
AMF3Type.boolTrue.rawValue : AMF3Type.boolFalse.rawValue) + } + + func deserialize() throws -> Bool { + switch try readUInt8() { + case AMF3Type.boolTrue.rawValue: + return true + case AMF3Type.boolFalse.rawValue: + return false + default: + throw AMFSerializerError.deserialize + } + } + + /** + - seealso: 3.6 integer type + */ + @discardableResult + func serialize(_ value: Int) -> Self { + writeUInt8(AMF3Type.integer.rawValue).serializeU29(value) + } + + func deserialize() throws -> Int { + guard try readUInt8() == AMF3Type.integer.rawValue else { + throw AMFSerializerError.deserialize + } + return try deserializeU29() + } + + /** + - seealso: 3.7 double type + */ + @discardableResult + func serialize(_ value: Double) -> Self { + writeUInt8(AMF3Type.number.rawValue).writeDouble(value) + } + + func deserialize() throws -> Double { + guard try readUInt8() == AMF3Type.number.rawValue else { + throw AMFSerializerError.deserialize + } + return try readDouble() + } + + /** + - seealso: 3.8 String type + */ + @discardableResult + func serialize(_ value: String) -> Self { + writeUInt8(AMF3Type.string.rawValue).serializeUTF8(value) + } + + func deserialize() throws -> String { + guard try readUInt8() == AMF3Type.string.rawValue else { + throw AMFSerializerError.deserialize + } + return try deserializeUTF8() + } + + /** + - seealso: 3.9 XML type + */ + @discardableResult + func serialize(_ value: AMFXMLDocument) -> Self { + writeUInt8(AMF3Type.xml.rawValue) + if let index: Int = reference.indexOf(value) { + return serializeU29(index << 1) + } + reference.objects.append(value) + let utf8 = Data(value.description.utf8) + return serialize(utf8.count << 1 | 0x01).writeBytes(utf8) + } + + func deserialize() throws -> AMFXMLDocument { + guard try readUInt8() == AMF3Type.xml.rawValue else { + throw AMFSerializerError.deserialize + } + let refs: Int = try deserializeU29() + if (refs & 0x01) == 0 { + guard let document: AMFXMLDocument = try reference.getObject(refs >> 1) as? 
AMFXMLDocument else { + throw AMFSerializerError.deserialize + } + return document + } + let document = AMFXMLDocument(data: try readUTF8Bytes(refs >> 1)) + reference.objects.append(document) + return document + } + + /** + - seealso: 3.10 Date type + */ + @discardableResult + func serialize(_ value: Date) -> Self { + writeUInt8(AMF3Type.date.rawValue) + if let index: Int = reference.indexOf(value) { + return serializeU29(index << 1) + } + reference.objects.append(value) + return serializeU29(0x01).writeDouble(value.timeIntervalSince1970 * 1000) + } + + func deserialize() throws -> Date { + guard try readUInt8() == AMF3Type.date.rawValue else { + throw AMFSerializerError.deserialize + } + let refs: Int = try deserializeU29() + if (refs & 0x01) == 0 { + guard let date: Date = try reference.getObject(refs >> 1) as? Date else { + throw AMFSerializerError.deserialize + } + return date + } + let date = Date(timeIntervalSince1970: try readDouble() / 1000) + reference.objects.append(date) + return date + } + + /** + - seealso: 3.11 Array type + */ + @discardableResult + func serialize(_ value: AMFArray) -> Self { + writeUInt8(AMF3Type.array.rawValue) + if let index: Int = reference.indexOf(value) { + return serializeU29(index << 1) + } + reference.objects.append(value) + serialize(value.length << 1 | 0x01) + for (key, value) in value.dict { + serialize(key).serialize(value) + } + serialize("") + for value in value.data { + serialize(value) + } + return self + } + + func deserialize() throws -> AMFArray { + guard try readUInt8() == AMF3Type.array.rawValue else { + throw AMFSerializerError.deserialize + } + return AMFArray() + } + + /** + - seealso: 3.12 Object type + - note: ASObject = Dictionary + */ + @discardableResult + func serialize(_ value: AMFObject) -> Self { + writeUInt8(AMF3Type.object.rawValue) + if let index: Int = reference.indexOf(value) { + return serializeU29(index << 1) + } + reference.objects.append(value) + for (key, value) in value { + 
serialize(key).serialize(value) + } + return serialize("") + } + + func deserialize() throws -> AMFObject { + guard try readUInt8() == AMF3Type.object.rawValue else { + throw AMFSerializerError.deserialize + } + return AMFObject() + } + + /** + - seealso: 3.13 XML type + */ + @discardableResult + func serialize(_ value: AMFXML) -> Self { + writeUInt8(AMF3Type.xmlString.rawValue) + if let index: Int = reference.indexOf(value) { + return serializeU29(index << 1) + } + reference.objects.append(value) + let utf8 = Data(value.description.utf8) + return serialize(utf8.count << 1 | 0x01).writeBytes(utf8) + } + + func deserialize() throws -> AMFXML { + guard try readUInt8() == AMF3Type.xml.rawValue else { + throw AMFSerializerError.deserialize + } + let refs: Int = try deserializeU29() + if (refs & 0x01) == 0 { + guard let xml: AMFXML = try reference.getObject(refs >> 1) as? AMFXML else { + throw AMFSerializerError.deserialize + } + return xml + } + let xml = AMFXML(data: try readUTF8Bytes(refs >> 1)) + reference.objects.append(xml) + return xml + } + + /** + - seealso: 3.14 ByteArray type + - note: flash.utils.ByteArray = lf.ByteArray + */ + @discardableResult + func serialize(_ value: Data) -> Self { + self + } + + func deserialize() throws -> Data { + Data() + } + + /** + - seealso: 3.15 Vector Type, vector-int-type + */ + @discardableResult + func serialize(_ value: [Int32]) -> Self { + writeUInt8(AMF3Type.vectorInt.rawValue) + if let index: Int = reference.indexOf(value) { + return serializeU29(index << 1) + } + reference.objects.append(value) + serializeU29(value.count << 1 | 0x01).writeUInt8(0x00) + for v in value { + writeInt32(v) + } + return self + } + + func deserialize() throws -> [Int32] { + guard try readUInt8() == AMF3Type.vectorInt.rawValue else { + throw AMFSerializerError.deserialize + } + return [] + } + + /** + - seealso: 3.15 Vector Type, vector-uint-type + */ + @discardableResult + func serialize(_ value: [UInt32]) -> Self { + 
writeUInt8(AMF3Type.vectorUInt.rawValue) + if let index: Int = reference.indexOf(value) { + return serializeU29(index << 1) + } + reference.objects.append(value) + serializeU29(value.count << 1 | 0x01).writeUInt8(0x00) + for v in value { + writeUInt32(v) + } + return self + } + + func deserialize() throws -> [UInt32] { + guard try readUInt8() == AMF3Type.vectorUInt.rawValue else { + throw AMFSerializerError.deserialize + } + return [] + } + + /** + - seealso: 3.15 Vector Type, vector-number-type + */ + @discardableResult + func serialize(_ value: [Double]) -> Self { + writeUInt8(AMF3Type.vectorNumber.rawValue) + if let index: Int = reference.indexOf(value) { + return serializeU29(index << 1) + } + reference.objects.append(value) + serializeU29(value.count << 1 | 0x01).writeUInt8(0x00) + for v in value { + writeDouble(v) + } + return self + } + + func deserialize() throws -> [Double] { + guard try readUInt8() == AMF3Type.vectorNumber.rawValue else { + throw AMFSerializerError.deserialize + } + return [] + } + + /** + - seealso: 3.15 Vector Type, vector-object-type + */ + @discardableResult + func serialize(_ value: [(any Sendable)?]) -> Self { + writeUInt8(AMF3Type.vectorObject.rawValue) + if let index: Int = reference.indexOf(value) { + return serializeU29(index << 1) + } + reference.objects.append(value) + serializeU29(value.count << 1 | 0x01).serializeUTF8("*") + for v in value { + serialize(v) + } + return self + } + + func deserialize() throws -> [(any Sendable)?] 
{ + guard try readUInt8() == AMF3Type.array.rawValue else { + throw AMFSerializerError.deserialize + } + return [] + } + + /** + - seealso: 1.3.1 Variable Length Unsigned 29-bit Integer Encoding + */ + @discardableResult + private func serializeU29(_ value: Int) -> Self { + if value < Int(Int32.min) || Int(Int32.max) < value { + return serialize(Double(value)) + } + let value = UInt32(value) + switch UInt32(0) { + case value & 0xFFFFFF80: + return writeUInt8(UInt8(value & 0x7f)) + case value & 0xFFFFC000: + return writeUInt8(UInt8(value >> 7 | 0x80)) + .writeUInt8(UInt8(value & 0x7F)) + case value & 0xFFE00000: + return writeUInt8(UInt8(value >> 14 | 0x80)) + .writeUInt8(UInt8(value >> 7 | 0x80)) + .writeUInt8(UInt8(value & 0x7F)) + default: + return writeUInt8(UInt8(value >> 22 | 0x80)) + .writeUInt8(UInt8(value >> 15 | 0x80)) + .writeUInt8(UInt8(value >> 8 | 0x80)) + .writeUInt8(UInt8(value & 0xFF)) + } + } + + private func deserializeU29() throws -> Int { + var count = 1 + var result = 0 + var byte: UInt8 = try readUInt8() + + while byte & 0x80 != 0 && count < 4 { + result <<= 7 + result |= Int(byte & 0x7F) + byte = try readUInt8() + count += 1 + } + + if count < 4 { + result <<= 7 + result |= Int(byte) + } else { + result <<= 8 + result |= Int(byte) + } + + return result + } + + /** + - seealso: 1.3.2 Strings and UTF-8 + */ + @discardableResult + private func serializeUTF8(_ value: String) -> Self { + if value.isEmpty { + return serializeU29(0x01) + } + if let index: Int = reference.indexOf(value) { + return serializeU29(index << 1) + } + let utf8 = Data(value.utf8) + reference.strings.append(value) + return serializeU29(utf8.count << 1 | 0x01).writeBytes(utf8) + } + + private func deserializeUTF8() throws -> String { + let ref: Int = try deserializeU29() + if (ref & 0x01) == 0 { + return try reference.getString(ref >> 1) + } + let string: String = try readUTF8Bytes(length) + reference.strings.append(string) + return string + } +} diff --git 
a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/AMF/AMFFoundation.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/AMF/AMFFoundation.swift new file mode 100644 index 000000000..19d89032d --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/AMF/AMFFoundation.swift @@ -0,0 +1,151 @@ +import Foundation + +/// The singleton AMFUndefined object. +public let kAMFUndefined = AMFUndefined() + +/// The AMFObject typealias represents an object for AcrionScript. +public typealias AMFObject = [String: (any Sendable)?] + +/// The AMFUndefined structure represents an undefined for ActionScript. +public struct AMFUndefined: Sendable, CustomStringConvertible { + public var description: String { + "undefined" + } +} + +/// The AMFTypedObject structure represents a typed object for ActionScript. +public struct AMFTypedObject: Sendable { + /// The type name. + public let typeName: String + /// The data of object contents. + public let data: AMFObject +} + +// MARK: - +/// The AMFArray structure represents an array value for ActionScript. +public struct AMFArray: Sendable { + private(set) var data: [(any Sendable)?] + private(set) var dict: [String: (any Sendable)?] = [:] + + /// The length of an array. + public var length: Int { + data.count + } + + /// Creates a new instance containing the specified number of a single. + public init(count: Int) { + self.data = [(any Sendable)?](repeating: kAMFUndefined, count: count) + } + + /// Creates a new instance of data. + public init(data: [(any Sendable)?]) { + self.data = data + } + + init(_ dict: AMFObject) { + self.dict = dict + self.data = .init() + } +} + +extension AMFArray: ExpressibleByArrayLiteral { + // MARK: ExpressibleByArrayLiteral + public init (arrayLiteral elements: (any Sendable)?...) { + self = AMFArray(data: elements) + } + + /// Accesses the element at the specified position. + public subscript(i: Any) -> (any Sendable)? { + get { + if let i: Int = i as? Int { + return i < data.count ? 
data[i] : kAMFUndefined + } + if let i: String = i as? String { + if let i = Int(i) { + return i < data.count ? data[i] : kAMFUndefined + } + return dict[i] as (any Sendable) + } + return nil + } + set { + if let i = i as? Int { + if data.count <= i { + data += [(any Sendable)?](repeating: kAMFUndefined, count: i - data.count + 1) + } + data[i] = newValue + } + if let i = i as? String { + if let i = Int(i) { + if data.count <= i { + data += [(any Sendable)?](repeating: kAMFUndefined, count: i - data.count + 1) + } + data[i] = newValue + return + } + dict[i] = newValue + } + } + } +} + +extension AMFArray: CustomDebugStringConvertible { + // MARK: CustomDebugStringConvertible + public var debugDescription: String { + data.debugDescription + ":" + dict.debugDescription + } +} + +extension AMFArray: Equatable { + // MARK: Equatable + public static func == (lhs: AMFArray, rhs: AMFArray) -> Bool { + (lhs.data.description == rhs.data.description) && (lhs.dict.description == rhs.dict.description) + } +} + +// MARK: - +/// ActionScript 1.0 and 2.0 and flash.xml.XMLDocument in ActionScript 3.0 +/// - seealso: 2.17 XML Document Type (amf0-file-format-specification.pdf) +/// - seealso: 3.9 XMLDocument type (amf-file-format-spec.pdf) +public struct AMFXMLDocument: Sendable, CustomStringConvertible { + public var description: String { + data + } + + private let data: String + + /// Creates a new instance of string. + public init(data: String) { + self.data = data + } +} + +extension AMFXMLDocument: Equatable { + // MARK: Equatable + public static func == (lhs: AMFXMLDocument, rhs: AMFXMLDocument) -> Bool { + (lhs.description == rhs.description) + } +} + +// MARK: - +/// ActionScript 3.0 introduces a new XML type. +/// - seealso: 3.13 XML type (amf-file-format-spec.pdf) +public struct AMFXML: Sendable, CustomStringConvertible { + public var description: String { + data + } + + private let data: String + + /// Creates a new instance of string. 
+ public init(data: String) { + self.data = data + } +} + +extension AMFXML: Equatable { + // MARK: Equatable + public static func == (lhs: AMFXML, rhs: AMFXML) -> Bool { + (lhs.description == rhs.description) + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Codec/AVCDecoderConfigurationRecord.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Codec/AVCDecoderConfigurationRecord.swift new file mode 100644 index 000000000..7b375c5a3 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Codec/AVCDecoderConfigurationRecord.swift @@ -0,0 +1,130 @@ +import AVFoundation +import HaishinKit +import VideoToolbox + +protocol DecoderConfigurationRecord { + func makeFormatDescription() -> CMFormatDescription? +} + +// MARK: - +/* + - seealso: ISO/IEC 14496-15 2010 + */ +struct AVCDecoderConfigurationRecord: DecoderConfigurationRecord { + static let reserveLengthSizeMinusOne: UInt8 = 0x3F + static let reserveNumOfSequenceParameterSets: UInt8 = 0xE0 + static let reserveChromaFormat: UInt8 = 0xFC + static let reserveBitDepthLumaMinus8: UInt8 = 0xF8 + static let reserveBitDepthChromaMinus8 = 0xF8 + + var configurationVersion: UInt8 = 1 + var avcProfileIndication: UInt8 = 0 + var profileCompatibility: UInt8 = 0 + var avcLevelIndication: UInt8 = 0 + var lengthSizeMinusOneWithReserved: UInt8 = 0 + var numOfSequenceParameterSetsWithReserved: UInt8 = 0 + var sequenceParameterSets: [[UInt8]] = [] + var pictureParameterSets: [[UInt8]] = [] + + var chromaFormatWithReserve: UInt8 = 0 + var bitDepthLumaMinus8WithReserve: UInt8 = 0 + var bitDepthChromaMinus8WithReserve: UInt8 = 0 + var sequenceParameterSetExt: [[UInt8]] = [] + + var naluLength: Int32 { + Int32((lengthSizeMinusOneWithReserved >> 6) + 1) + } + + init() { + } + + init(data: Data) { + self.data = data + } + + func makeFormatDescription() -> CMFormatDescription? { + return pictureParameterSets[0].withUnsafeBytes { (ppsBuffer: UnsafeRawBufferPointer) -> CMFormatDescription? 
in + guard let ppsBaseAddress = ppsBuffer.baseAddress else { + return nil + } + return sequenceParameterSets[0].withUnsafeBytes { (spsBuffer: UnsafeRawBufferPointer) -> CMFormatDescription? in + guard let spsBaseAddress = spsBuffer.baseAddress else { + return nil + } + let pointers: [UnsafePointer] = [ + spsBaseAddress.assumingMemoryBound(to: UInt8.self), + ppsBaseAddress.assumingMemoryBound(to: UInt8.self) + ] + let sizes: [Int] = [spsBuffer.count, ppsBuffer.count] + let nalUnitHeaderLength: Int32 = 4 + var formatDescriptionOut: CMFormatDescription? + CMVideoFormatDescriptionCreateFromH264ParameterSets( + allocator: kCFAllocatorDefault, + parameterSetCount: pointers.count, + parameterSetPointers: pointers, + parameterSetSizes: sizes, + nalUnitHeaderLength: nalUnitHeaderLength, + formatDescriptionOut: &formatDescriptionOut + ) + return formatDescriptionOut + } + } + } +} + +extension AVCDecoderConfigurationRecord: DataConvertible { + // MARK: DataConvertible + var data: Data { + get { + let buffer = ByteArray() + .writeUInt8(configurationVersion) + .writeUInt8(avcProfileIndication) + .writeUInt8(profileCompatibility) + .writeUInt8(avcLevelIndication) + .writeUInt8(lengthSizeMinusOneWithReserved) + .writeUInt8(numOfSequenceParameterSetsWithReserved) + for i in 0.. CMFormatDescription? { + guard let vps = array[.vps], let sps = array[.sps], let pps = array[.pps] else { + return nil + } + return vps[0].withUnsafeBytes { (vpsBuffer: UnsafeRawBufferPointer) -> CMFormatDescription? in + guard let vpsBaseAddress = vpsBuffer.baseAddress else { + return nil + } + return sps[0].withUnsafeBytes { (spsBuffer: UnsafeRawBufferPointer) -> CMFormatDescription? in + guard let spsBaseAddress = spsBuffer.baseAddress else { + return nil + } + return pps[0].withUnsafeBytes { (ppsBuffer: UnsafeRawBufferPointer) -> CMFormatDescription? in + guard let ppsBaseAddress = ppsBuffer.baseAddress else { + return nil + } + var formatDescriptionOut: CMFormatDescription? 
+ let pointers: [UnsafePointer] = [ + vpsBaseAddress.assumingMemoryBound(to: UInt8.self), + spsBaseAddress.assumingMemoryBound(to: UInt8.self), + ppsBaseAddress.assumingMemoryBound(to: UInt8.self) + ] + let sizes: [Int] = [vpsBuffer.count, spsBuffer.count, ppsBuffer.count] + let nalUnitHeaderLength: Int32 = 4 + CMVideoFormatDescriptionCreateFromHEVCParameterSets( + allocator: kCFAllocatorDefault, + parameterSetCount: pointers.count, + parameterSetPointers: pointers, + parameterSetSizes: sizes, + nalUnitHeaderLength: nalUnitHeaderLength, + extensions: nil, + formatDescriptionOut: &formatDescriptionOut + ) + return formatDescriptionOut + } + } + } + } +} + +extension HEVCDecoderConfigurationRecord: DataConvertible { + // MARK: DataConvertible + var data: Data { + get { + let buffer = ByteArray() + .writeUInt8(configurationVersion) + return buffer.data + } + set { + let buffer = ByteArray(data: newValue) + do { + configurationVersion = try buffer.readUInt8() + let a = try buffer.readUInt8() + generalProfileSpace = a >> 6 + generalTierFlag = a & 0x20 > 0 + generalProfileIdc = a & 0x1F + generalProfileCompatibilityFlags = try buffer.readUInt32() + generalConstraintIndicatorFlags = UInt64(try buffer.readUInt32()) << 16 | UInt64(try buffer.readUInt16()) + generalLevelIdc = try buffer.readUInt8() + minSpatialSegmentationIdc = try buffer.readUInt16() & 0xFFF + parallelismType = try buffer.readUInt8() & 0x3 + chromaFormat = try buffer.readUInt8() & 0x3 + bitDepthLumaMinus8 = try buffer.readUInt8() & 0x7 + bitDepthChromaMinus8 = try buffer.readUInt8() & 0x7 + avgFrameRate = try buffer.readUInt16() + let b = try buffer.readUInt8() + constantFrameRate = b >> 6 + numTemporalLayers = b & 0x38 >> 3 + temporalIdNested = b & 0x6 >> 1 + lengthSizeMinusOne = b & 0x3 + numberOfArrays = try buffer.readUInt8() + for _ in 0.. (any DecoderConfigurationRecord)? 
{ + guard let configurationBox else { + return nil + } + switch mediaSubType { + case .h264: + return AVCDecoderConfigurationRecord(data: configurationBox) + case .hevc: + return HEVCDecoderConfigurationRecord(data: configurationBox) + default: + return nil + } + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Extension/IncomingStream+Extension.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Extension/IncomingStream+Extension.swift new file mode 100644 index 000000000..0ca1db14a --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Extension/IncomingStream+Extension.swift @@ -0,0 +1,11 @@ +import CoreMedia +import HaishinKit + +extension IncomingStream { + func append(_ message: RTMPVideoMessage, presentationTimeStamp: CMTime, formatDesciption: CMFormatDescription?) { + guard let buffer = message.makeSampleBuffer(presentationTimeStamp, formatDesciption: formatDesciption) else { + return + } + append(buffer) + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Extension/URL+Extension.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Extension/URL+Extension.swift new file mode 100644 index 000000000..a7487429b --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Extension/URL+Extension.swift @@ -0,0 +1,32 @@ +import Foundation + +extension URL { + var absoluteWithoutAuthenticationString: String { + guard var components = URLComponents(string: absoluteString) else { + return absoluteString + } + components.password = nil + components.user = nil + return components.url?.absoluteString ?? absoluteString + } + + var absoluteWithoutQueryString: String { + guard let query: String = self.query else { + return self.absoluteString + } + return absoluteString.replacingOccurrences(of: "?" 
+ query, with: "") + } + + func dictionaryFromQuery() -> [String: String] { + var result: [String: String] = [:] + guard let query = URLComponents(string: absoluteString)?.queryItems else { + return result + } + for item in query { + if let value: String = item.value { + result[item.name] = value + } + } + return result + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPAuthenticator.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPAuthenticator.swift new file mode 100644 index 000000000..7b3cd107f --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPAuthenticator.swift @@ -0,0 +1,56 @@ +import Foundation + +final class RTMPAuthenticator { + enum Error: Swift.Error { + case noCredential + case failedToAuth(description: String) + } + + private static func makeSanJoseAuthCommand(_ url: URL, description: String) -> String { + var command: String = url.absoluteString + + guard let index = description.firstIndex(of: "?") else { + return command + } + + let query = String(description[description.index(index, offsetBy: 1)...]) + let challenge = String(format: "%08x", UInt32.random(in: 0...UInt32.max)) + let dictionary = URL(string: "http://localhost?" 
+ query)!.dictionaryFromQuery() + + var response = MD5.base64("\(url.user!)\(dictionary["salt"]!)\(url.password!)") + if let opaque = dictionary["opaque"] { + command += "&opaque=\(opaque)" + response += opaque + } else if let challenge: String = dictionary["challenge"] { + response += challenge + } + + response = MD5.base64("\(response)\(challenge)") + command += "&challenge=\(challenge)&response=\(response)" + + return command + } + + func makeCommand(_ command: String, status: RTMPStatus) -> Result { + switch true { + case status.description.contains("reason=needauth"): + guard + let uri = URL(string: command) else { + return .failure(Error.noCredential) + } + let command = Self.makeSanJoseAuthCommand(uri, description: status.description) + return .success(command) + case status.description.contains("authmod=adobe"): + guard + let uri = URL(string: command), + let user = uri.user, uri.password != nil else { + return .failure(Error.noCredential) + } + let query = uri.query ?? "" + let command = uri.absoluteString + (query.isEmpty ? "?" 
: "&") + "authmod=adobe&user=\(user)" + return .success(command) + default: + return .failure(Error.failedToAuth(description: status.description)) + } + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPChunk.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPChunk.swift new file mode 100644 index 000000000..f68fcff07 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPChunk.swift @@ -0,0 +1,318 @@ +import Foundation + +private let kRTMPExtendTimestampSize = 4 + +enum RTMPChunkError: Swift.Error { + case bufferUnderflow + case unknowChunkType(value: UInt8) +} + +enum RTMPChunkType: UInt8 { + case zero = 0 + case one = 1 + case two = 2 + case three = 3 + + var headerSize: Int { + switch self { + case .zero: + return 11 + case .one: + return 7 + case .two: + return 3 + case .three: + return 0 + } + } +} + +enum RTMPChunkStreamId: UInt16 { + case control = 0x02 + case command = 0x03 + case audio = 0x04 + case video = 0x05 + case data = 0x08 +} + +final class RTMPChunkMessageHeader { + static let chunkSize = 128 + static let maxTimestamp: UInt32 = 0xFFFFFF + + var timestamp: UInt32 = 0 + var messageLength: Int = 0 { + didSet { + guard payload.count != messageLength else { + return + } + payload = Data(count: messageLength) + position = 0 + } + } + var messageTypeId: UInt8 = 0 + var messageStreamId: UInt32 = 0 + private(set) var payload = Data() + private var position = 0 + + init() { + } + + init(timestmap: UInt32, messageLength: Int, messageTypeId: UInt8, messageStreamId: UInt32) { + self.timestamp = timestmap + self.messageLength = messageLength + self.messageTypeId = messageTypeId + self.messageStreamId = messageStreamId + self.payload = Data(count: messageLength) + } + + func put(_ buffer: RTMPChunkBuffer, chunkSize: Int) throws { + let length = min(chunkSize, messageLength - position) + if buffer.remaining < length { + throw RTMPChunkError.bufferUnderflow + } + self.payload.replaceSubrange(position.. 
(any RTMPMessage)? { + if position < payload.count { + return nil + } + switch messageTypeId { + case 0x01: + return RTMPSetChunkSizeMessage(self) + case 0x02: + return RTMPAbortMessge(self) + case 0x03: + return RTMPAcknowledgementMessage(self) + case 0x04: + return RTMPUserControlMessage(self) + case 0x05: + return RTMPWindowAcknowledgementSizeMessage(self) + case 0x06: + return RTMPSetPeerBandwidthMessage(self) + case 0x08: + return RTMPAudioMessage(self) + case 0x09: + return RTMPVideoMessage(self) + case 0x0F: + return RTMPDataMessage(self, objectEncoding: .amf3) + case 0x10: + return RTMPSharedObjectMessage(self, objectEncoding: .amf3) + case 0x11: + return RTMPCommandMessage(self, objectEncoding: .amf3) + case 0x12: + return RTMPDataMessage(self, objectEncoding: .amf0) + case 0x13: + return RTMPSharedObjectMessage(self, objectEncoding: .amf0) + case 0x14: + return RTMPCommandMessage(self, objectEncoding: .amf0) + case 0x16: + return RTMPAggregateMessage(self) + default: + return nil + } + } +} + +final class RTMPChunkBuffer { + static let headerSize = 3 + 11 + 4 + + var payload: Data { + data[position.. Self { + length = position + position = 0 + return self + } + + func get(_ length: Int) -> Data { + defer { + position += length + } + return data[position.. 
(RTMPChunkType, UInt16) { + let rawValue = (data[position] & 0b11000000) >> 6 + guard let type = RTMPChunkType(rawValue: rawValue) else { + throw RTMPChunkError.unknowChunkType(value: rawValue) + } + switch data[position] & 0b00111111 { + case 0: + defer { + position += 2 + } + return (type, UInt16(data[position + 1]) + 64) + case 1: + defer { + position += 3 + } + return (type, UInt16(data: data[position + 1...position + 2]) + 64) + default: + defer { + position += 1 + } + return (type, UInt16(data[position] & 0b00111111)) + } + } + + func getMessageHeader(_ type: RTMPChunkType, messageHeader: RTMPChunkMessageHeader) throws { + if remaining < type.headerSize { + throw RTMPChunkError.bufferUnderflow + } + switch type { + case .zero: + messageHeader.timestamp = UInt32(data: data[position.. AnyIterator { + let payload = message.payload + let length = payload.count + var offset = 0 + var remaining = min(chunkSize, length) + return AnyIterator { () -> Data? in + guard 0 < remaining else { + return nil + } + defer { + self.position = 0 + offset += remaining + remaining = min(self.chunkSize, length - offset) + } + if offset == 0 { + self.putBasicHeader(chunkType, chunkStreamId: chunkStreamId) + self.putMessageHeader(chunkType, length: length, message: message) + } else { + self.putBasicHeader(.three, chunkStreamId: chunkStreamId) + } + self.data.replaceSubrange(self.position.. = ["rtmp", "rtmps"] + /// The supported fourCcList. + public static let supportedFourCcList = [RTMPVideoFourCC.hevc.description, RTMPAudioFourCC.opus.description] + /// The default RTMP port is 1935. + public static let defaultPort: Int = 1935 + /// The default RTMPS port is 443. + public static let defaultSecurePort: Int = 443 + /// The default flashVer is FMLE/3.0 (compatible; FMSc/1.0). + public static let defaultFlashVer: String = "FMLE/3.0 (compatible; FMSc/1.0)" + /// The default chunk size for RTMPConnection. 
+ public static let defaultChunkSizeS: Int = 1024 * 8 + /// The default capabilities for RTMPConnection. + public static let defaultCapabilities: Int = 239 + /// The default object encoding for RTMPConnection class. + public static let defaultObjectEncoding: RTMPObjectEncoding = .amf0 + /// The default an rtmp request time out value (ms). + public static let defaultRequestTimeout: UInt64 = 3000 + /// The supported audio fourCc Information. + public static let supportedAudioFourCcInfoMap: AMFObject = [ + RTMPAudioFourCC.opus.description: FourCcInfoMask.canEncode.rawValue + ] + /// The supported video fourCc Information. + public static let supportedVideoFourCcInfoMap: AMFObject = [ + RTMPVideoFourCC.hevc.description: FourCcInfoMask.canDecode.rawValue | FourCcInfoMask.canEncode.rawValue + ] + private static let connectTransactionId = 1 + + /** + - NetStatusEvent#info.code for NetConnection + - see: https://help.adobe.com/en_US/air/reference/html/flash/events/NetStatusEvent.html#NET_STATUS + */ + public enum Code: String { + case callBadVersion = "NetConnection.Call.BadVersion" + case callFailed = "NetConnection.Call.Failed" + case callProhibited = "NetConnection.Call.Prohibited" + case connectAppshutdown = "NetConnection.Connect.AppShutdown" + case connectClosed = "NetConnection.Connect.Closed" + case connectFailed = "NetConnection.Connect.Failed" + case connectIdleTimeOut = "NetConnection.Connect.IdleTimeOut" + case connectInvalidApp = "NetConnection.Connect.InvalidApp" + case connectNetworkChange = "NetConnection.Connect.NetworkChange" + case connectRejected = "NetConnection.Connect.Rejected" + case connectSuccess = "NetConnection.Connect.Success" + + public var level: String { + switch self { + case .callBadVersion: + return "error" + case .callFailed: + return "error" + case .callProhibited: + return "error" + case .connectAppshutdown: + return "error" + case .connectClosed: + return "status" + case .connectFailed: + return "error" + case .connectIdleTimeOut: + 
return "status" + case .connectInvalidApp: + return "error" + case .connectNetworkChange: + return "status" + case .connectRejected: + return "error" + case .connectSuccess: + return "status" + } + } + + func status(_ description: String) -> RTMPStatus { + return .init(code: rawValue, level: level, description: description) + } + } + + enum SupportVideo: UInt16 { + case unused = 0x0001 + case jpeg = 0x0002 + case sorenson = 0x0004 + case homebrew = 0x0008 + case vp6 = 0x0010 + case vp6Alpha = 0x0020 + case homebrewv = 0x0040 + case h264 = 0x0080 + case all = 0x00FF + } + + enum SupportSound: UInt16 { + case none = 0x0001 + case adpcm = 0x0002 + case mp3 = 0x0004 + case intel = 0x0008 + case unused = 0x0010 + case nelly8 = 0x0020 + case nelly = 0x0040 + case g711A = 0x0080 + case g711U = 0x0100 + case nelly16 = 0x0200 + case aac = 0x0400 + case speex = 0x0800 + case all = 0x0FFF + } + + enum VideoFunction: UInt8 { + case clientSeek = 1 + } + + enum FourCcInfoMask: Int { + case canDecode = 0x01 + case canEncode = 0x02 + case canForward = 0x04 + } + + enum CapsEx: Int { + case recoonect = 0x01 + case multitrack = 0x02 + case modEx = 0x04 + case timestampNanoOffset = 0x08 + } + + /// The URL of .swf. + public let swfUrl: String? + /// The URL of an HTTP referer. + public let pageUrl: String? + /// The name of application. + public let flashVer: String + /// The fourCcList for Enhancing NetConnection connect Command. + public let fourCcList: [String]? + /// The audio fourCc information for Enhancing NetConnection connect Command. + public let audioFourCcInfoMap: AMFObject? + /// The video fourCc information for Enhancing NetConnection connect Command. + public let videoFourCcInfoMap: AMFObject? + /// The capability flags for Enhancing NetConnection connect Command. + public let capsEx: Int + /// The time to wait for TCP/IP Handshake done. + public let timeout: Int + /// The RTMP request timeout value. Defaul value is 500 msec. 
+ public let requestTimeout: UInt64 + /// The outgoing RTMPChunkSize. + public let chunkSize: Int + /// The dispatchQos for socket. + public let qualityOfService: DispatchQoS + /// The URI passed to the Self.connect() method. + public private(set) var uri: URL? + /// The instance connected to server(true) or not(false). + @Published public private(set) var connected = false + /// The stream of events you receive RTMP status events from a service. + public var status: AsyncStream { + AsyncStream { continuation in + statusContinuation = continuation + } + } + /// The object encoding for this RTMPConnection instance. + public let objectEncoding = RTMPConnection.defaultObjectEncoding + + var newTransaction: Int { + currentTransactionId += 1 + return currentTransactionId + } + + private var socket: RTMPSocket? + private var chunks: [UInt16: RTMPChunkMessageHeader] = [:] + private var streams: [RTMPStream] = [] + private var sequence: Int64 = 0 + private var bandWidth: UInt32 = 0 + private var handshake: RTMPHandshake = .init() + private var arguments: [(any Sendable)?] 
= [] + private var readyState: ReadyState = .uninitialized { + didSet { + logger.info(oldValue, "=>", readyState) + } + } + private var chunkSizeC = RTMPChunkMessageHeader.chunkSize { + didSet { + guard chunkSizeC != oldValue else { + return + } + inputBuffer.chunkSize = chunkSizeC + } + } + private var chunkSizeS = RTMPChunkMessageHeader.chunkSize { + didSet { + guard chunkSizeS != oldValue else { + return + } + outputBuffer.chunkSize = chunkSizeS + } + } + private var operations: [Int: CheckedContinuation] = [:] + private var inputBuffer = RTMPChunkBuffer() + private var windowSizeC = RTMPConnection.defaultWindowSizeS { + didSet { + guard connected else { + return + } + doOutput(.zero, chunkStreamId: .control, message: RTMPWindowAcknowledgementSizeMessage(size: UInt32(windowSizeC))) + } + } + private var windowSizeS = RTMPConnection.defaultWindowSizeS + private var outputBuffer = RTMPChunkBuffer() + private let authenticator = RTMPAuthenticator() + private var networkMonitor: NetworkMonitor? + private var statusContinuation: AsyncStream.Continuation? + private var currentTransactionId = RTMPConnection.connectTransactionId + + /// Creates a new connection with E-RTMP command parameters. + /// + /// You can specify the fourCcList parameter of the connect command defined in E-RTMP. + /// In some RTMP server implementations where these parameters are not supported, you can work around the issue by specifying them as shown below. + /// + /// ## Example code: + /// ```swift + /// let connection = RTMPConnection( + /// fourCcList: nil, + /// videoFourCcInfoMap: nil, + /// audioFourCcInfoMap: nil, + /// capsEx: 0 + /// ) + /// ``` + public init( + swfUrl: String? = nil, + pageUrl: String? = nil, + flashVer: String = RTMPConnection.defaultFlashVer, + fourCcList: [String]? = RTMPConnection.supportedFourCcList, + videoFourCcInfoMap: AMFObject? = RTMPConnection.supportedVideoFourCcInfoMap, + audioFourCcInfoMap: AMFObject? 
= RTMPConnection.supportedAudioFourCcInfoMap, + capsEx: Int = 0, + timeout: Int = RTMPConnection.defaultTimeout, + requestTimeout: UInt64 = RTMPConnection.defaultRequestTimeout, + chunkSize: Int = RTMPConnection.defaultChunkSizeS, + qualityOfService: DispatchQoS = .userInitiated) { + self.swfUrl = swfUrl + self.pageUrl = pageUrl + self.flashVer = flashVer + self.timeout = timeout + self.fourCcList = fourCcList + self.videoFourCcInfoMap = videoFourCcInfoMap + self.audioFourCcInfoMap = audioFourCcInfoMap + self.capsEx = capsEx + self.requestTimeout = requestTimeout + self.chunkSize = chunkSize + self.qualityOfService = qualityOfService + } + + deinit { + streams.removeAll() + } + + /// Calls a command or method on RTMP Server. + public func call(_ commandName: String, arguments: (any Sendable)?...) async throws -> RTMPResponse { + guard connected else { + throw Error.invalidState + } + return try await withCheckedThrowingContinuation { continutation in + let message = RTMPCommandMessage( + streamId: 0, + transactionId: newTransaction, + objectEncoding: objectEncoding, + commandName: commandName, + commandObject: nil, + arguments: arguments + ) + Task { + try? await Task.sleep(nanoseconds: requestTimeout * 1_000_000) + guard let operation = operations.removeValue(forKey: message.transactionId) else { + return + } + operation.resume(throwing: Error.requestTimedOut) + } + operations[message.transactionId] = continutation + doOutput(.zero, chunkStreamId: .command, message: message) + } + } + + /// Creates a two-way connection to an application on RTMP Server. + public func connect(_ command: String, arguments: (any Sendable)?...) 
async throws -> RTMPResponse { + guard !connected else { + throw Error.invalidState + } + guard let uri = URL(string: command), let scheme = uri.scheme, let host = uri.host, Self.supportedProtocols.contains(scheme) else { + throw Error.unsupportedCommand(command) + } + self.uri = uri + self.arguments = arguments + let secure = uri.scheme == "rtmps" || uri.scheme == "rtmpts" + handshake.clear() + chunks.removeAll() + sequence = 0 + readyState = .uninitialized + chunkSizeC = RTMPChunkMessageHeader.chunkSize + chunkSizeS = RTMPChunkMessageHeader.chunkSize + currentTransactionId = Self.connectTransactionId + socket = RTMPSocket(qualityOfService: qualityOfService, securityLevel: secure ? .negotiatedSSL : .none) + networkMonitor = await socket?.makeNetworkMonitor() + guard let socket, let networkMonitor else { + throw Error.invalidState + } + do { + let result: RTMPResponse = try await withCheckedThrowingContinuation { continutation in + Task { + do { + try await socket.connect(host, port: uri.port ?? (secure ? Self.defaultSecurePort : Self.defaultPort)) + } catch { + continutation.resume(throwing: error) + return + } + do { + readyState = .versionSent + await socket.send(handshake.c0c1packet) + operations[Self.connectTransactionId] = continutation + for await data in await socket.recv() { + try await listen(data) + } + try? await close() + } catch { + try? 
await close() + } + } + } + Task { + for await event in await networkMonitor.event { + dispatch(event) + } + } + for stream in streams { + await stream.dispatch(.reset) + await stream.createStream() + } + return result + } catch let error as RTMPSocket.Error { + switch error { + case .connectionTimedOut: + throw Error.connectionTimedOut + case .connectionNotEstablished(let socketError): + throw Error.socketErrorOccurred(socketError) + default: + throw Error.socketErrorOccurred(nil) + } + } catch let error as Error { + switch error { + case .requestFailed(let response): + guard let status = response.status else { + throw error + } + // Handles an RTMP auth. + if status.code == RTMPConnection.Code.connectRejected.rawValue { + switch authenticator.makeCommand(command, status: status) { + case .success(let command): + await socket.close() + return try await connect(command, arguments: arguments) + case .failure: + throw error + } + } else { + throw error + } + default: + throw error + } + } catch { + throw error + } + } + + /// Closes the connection from the server. + public func close() async throws { + guard readyState != .uninitialized else { + throw Error.invalidState + } + + uri = nil + for stream in streams { + if await stream.fcPublishName == nil { + _ = try? await stream.close() + } else { + await stream.deleteStream() + } + } + await socket?.close() + await networkMonitor?.stopRunning() + + let status = readyState == .handshakeDone ? 
+ Code.connectClosed.status("") : + Code.connectFailed.status("") + + connected = false + readyState = .uninitialized + + if let operation = operations.removeValue(forKey: Self.connectTransactionId) { + operation.resume(throwing: Error.requestFailed(response: .init(status: status))) + } else { + statusContinuation?.yield(status) + } + } + + @discardableResult + func doOutput(_ type: RTMPChunkType, chunkStreamId: RTMPChunkStreamId, message: some RTMPMessage) -> Int { + if logger.isEnabledFor(level: .trace) { + logger.trace("<<", message) + } + let iterator = outputBuffer.putMessage(type, chunkStreamId: chunkStreamId.rawValue, message: message) + Task { + await socket?.send(iterator) + } + return message.payload.count + } + + func addStream(_ stream: RTMPStream) { + streams.append(stream) + } + + private func listen(_ data: Data) async throws { + switch readyState { + case .versionSent: + handshake.put(data) + guard handshake.hasS0S1Packet else { + return + } + await socket?.send(handshake.c2packet()) + readyState = .ackSent + try await listen(.init()) + case .ackSent: + handshake.put(data) + guard handshake.hasS2Packet else { + return + } + readyState = .handshakeDone + guard let message = makeConnectionMessage() else { + try await close() + break + } + await networkMonitor?.startRunning() + doOutput(.zero, chunkStreamId: .command, message: message) + case .handshakeDone: + inputBuffer.put(data) + var rollbackPosition = inputBuffer.position + do { + while inputBuffer.hasRemaining { + rollbackPosition = inputBuffer.position + let (chunkType, chunkStreamId) = try inputBuffer.getBasicHeader() + if chunks[chunkStreamId] == nil { + chunks[chunkStreamId] = RTMPChunkMessageHeader() + } + if let messageHeader = chunks[chunkStreamId] { + try inputBuffer.getMessageHeader(chunkType, messageHeader: messageHeader) + if let message = messageHeader.makeMessage() { + await dispatch(message, type: chunkType) + messageHeader.reset() + } + } + } + } catch 
RTMPChunkError.unknowChunkType(let value) { + logger.error("Received unknow chunk type =", value) + try await close() + } catch RTMPChunkError.bufferUnderflow { + inputBuffer.position = rollbackPosition + } + default: + break + } + } + + private func dispatch(_ event: NetworkMonitorEvent) { + switch event { + case .status(let report), .publishInsufficientBWOccured(let report): + if windowSizeS * (sequence + 1) <= report.totalBytesIn { + doOutput(sequence == 0 ? .zero : .one, chunkStreamId: .control, message: RTMPAcknowledgementMessage(sequence: UInt32(report.totalBytesIn))) + sequence += 1 + } + case .reset: + // noop + break + } + for stream in streams { + Task { await stream.dispatch(event) } + } + } + + private func dispatch(_ message: some RTMPMessage, type: RTMPChunkType) async { + if logger.isEnabledFor(level: .trace) { + logger.trace(">>", message) + } + if message.streamId == 0 { + switch message { + case let message as RTMPSetChunkSizeMessage: + chunkSizeC = Int(message.size) + case let message as RTMPWindowAcknowledgementSizeMessage: + windowSizeC = Int64(message.size) + windowSizeS = Int64(message.size) + case let message as RTMPSetPeerBandwidthMessage: + bandWidth = message.size + case let message as RTMPCommandMessage: + let response = RTMPResponse(message) + defer { + if let status = response.status { + statusContinuation?.yield(status) + } + } + guard let responder = operations.removeValue(forKey: message.transactionId) else { + switch message.commandName { + case "close": + try? 
await close() + default: + break + } + return + } + switch message.commandName { + case "_result": + if message.transactionId == Self.connectTransactionId { + connected = true + chunkSizeS = chunkSize + doOutput(.zero, chunkStreamId: .control, message: RTMPSetChunkSizeMessage(size: UInt32(chunkSizeS))) + } + responder.resume(returning: response) + default: + responder.resume(throwing: Error.requestFailed(response: response)) + } + case let message as RTMPSharedObjectMessage: + guard let remotePath = uri?.absoluteWithoutQueryString else { + return + } + let persistence = (message.flags[3] & 2) != 0 + await RTMPSharedObject.getRemote(withName: message.sharedObjectName, remotePath: remotePath, persistence: persistence).on(message: message) + case let message as RTMPUserControlMessage: + switch message.event { + case .ping: + doOutput(.zero, chunkStreamId: .control, message: RTMPUserControlMessage(event: .pong, value: message.value)) + default: + for stream in streams where await stream.id == message.value { + Task { await stream.dispatch(message, type: type) } + } + } + default: + break + } + } else { + for stream in streams where await stream.id == message.streamId { + Task { await stream.dispatch(message, type: type) } + } + } + } + + private func makeConnectionMessage() -> RTMPCommandMessage? { + guard let uri else { + return nil + } + var app = uri.path.isEmpty ? "" : String(uri.path[uri.path.index(uri.path.startIndex, offsetBy: 1)...]) + if let query = uri.query { + app += "?" 
+ query + } + return RTMPCommandMessage( + streamId: 0, + transactionId: Self.connectTransactionId, + // "connect" must be a objectEncoding = 0 + objectEncoding: .amf0, + commandName: "connect", + commandObject: [ + "objectEncoding": objectEncoding.rawValue, + "app": app, + "flashVer": flashVer, + "swfUrl": swfUrl, + "tcUrl": uri.absoluteWithoutAuthenticationString, + "fpad": false, + "capabilities": Self.defaultCapabilities, + "audioCodecs": SupportSound.aac.rawValue, + "videoCodecs": SupportVideo.h264.rawValue, + "videoFunction": VideoFunction.clientSeek.rawValue, + "pageUrl": pageUrl, + // Enhancing NetConnection connect Command + "fourCcList": fourCcList, + "videoFourCcInfoMap": videoFourCcInfoMap, + "audioFourCcInfoMap": audioFourCcInfoMap, + "capsEx": capsEx + ], + arguments: arguments + ) + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPEnhanced.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPEnhanced.swift new file mode 100644 index 000000000..740e16178 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPEnhanced.swift @@ -0,0 +1,130 @@ +import HaishinKit + +enum RTMPAudioFourCC: UInt32, CustomStringConvertible { + case ac3 = 0x61632D33 // ac-3 + case eac3 = 0x65632D33 // ec-3 + case opus = 0x4F707573 // Opus + case mp3 = 0x2E6D7033 // .mp3 + case flac = 0x664C6143 // fLaC + case aac = 0x6D703461 // mp4a + + var description: String { + switch self { + case .ac3: + return "ac-3" + case .eac3: + return "ex-3" + case .opus: + return "Opus" + case .mp3: + return ".mp3" + case .flac: + return "fLaC" + case .aac: + return "mp4a" + } + } + + var isSupported: Bool { + switch self { + case .ac3: + return false + case .eac3: + return false + case .opus: + return true + case .mp3: + return false + case .flac: + return false + case .aac: + return false + } + } +} + +enum RTMPAudioPacketType: UInt8 { + case sequenceStart = 0 + case codedFrames = 1 + case sequenceEnd = 2 + case multiChannelConfig = 4 + 
case multiTrack = 5 + case modEx = 7 +} + +enum RTMPAudioPacketModExType: Int { + case timestampOffsetNano = 0 +} + +enum RTMPAVMultiTrackType: Int { + case oneTrack = 0 + case manyTracks = 1 + case manyTracksManyCOdecs = 2 +} + +enum RTMPAudioChannelOrder: Int { + case unspecified = 0 + case native = 1 + case custom = 2 +} + +enum RTMPVideoFourCC: UInt32, CustomStringConvertible { + case av1 = 0x61763031 // av01 + case vp9 = 0x76703039 // vp09 + case hevc = 0x68766331 // hvc1 + + var description: String { + switch self { + case .av1: + return "av01" + case .vp9: + return "vp09" + case .hevc: + return "hvc1" + } + } + + var isSupported: Bool { + switch self { + case .av1: + return false + case .vp9: + return false + case .hevc: + return true + } + } +} + +enum RTMPVideoPacketType: UInt8 { + case sequenceStart = 0 + case codedFrames = 1 + case sequenceEnd = 2 + case codedFramesX = 3 + case metadata = 4 + case mpeg2TSSequenceStart = 5 +} + +extension AudioCodecSettings.Format { + var codecid: Int { + switch self { + case .aac: + return Int(RTMPAudioCodec.aac.rawValue) + case .opus: + return Int(RTMPAudioFourCC.opus.rawValue) + case .pcm: + return Int(RTMPAudioCodec.pcm.rawValue) + } + } +} + +extension VideoCodecSettings.Format { + var codecid: Int { + switch self { + case .h264: + return Int(RTMPVideoCodec.avc.rawValue) + case .hevc: + return Int(RTMPVideoFourCC.hevc.rawValue) + } + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPFoundation.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPFoundation.swift new file mode 100644 index 000000000..f8f2bc848 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPFoundation.swift @@ -0,0 +1,255 @@ +import AVFoundation +import HaishinKit + +/// The type of flv supports aac packet types. +enum RTMPAACPacketType: UInt8 { + /// The sequence data. + case seq = 0 + /// The raw data. + case raw = 1 +} + +/// The type of flv supports avc packet types. 
+enum RTMPAVCPacketType: UInt8 { + /// The sequence data. + case seq = 0 + /// The NAL unit data. + case nal = 1 + /// The end of stream data. + case eos = 2 +} + +/// The type of flv supports audio codecs. +enum RTMPAudioCodec: UInt8 { + /// The PCM codec. + case pcm = 0 + /// The ADPCM codec. + case adpcm = 1 + /// The MP3 codec. + case mp3 = 2 + /// The PCM little endian codec. + case pcmle = 3 + /// The Nellymoser 16kHz codec. + case nellymoser16K = 4 + /// The Nellymoser 8kHz codec. + case nellymoser8K = 5 + /// The Nellymoser codec. + case nellymoser = 6 + /// The G.711 A-law codec. + case g711A = 7 + /// The G.711 mu-law codec. + case g711MU = 8 + /// The signal FOURCC mode. E-RTMP. + case exheader = 9 + /// The AAC codec. + case aac = 10 + /// The Speex codec. + case speex = 11 + /// The MP3 8kHz codec. + case mp3_8k = 14 + /// The Device-specific sound. + case device = 15 + /// The undefined codec + case unknown = 0xFF + + var isSupported: Bool { + switch self { + case .aac: + return true + default: + return false + } + } + + var formatID: AudioFormatID { + switch self { + case .pcm: + return kAudioFormatLinearPCM + case .mp3: + return kAudioFormatMPEGLayer3 + case .pcmle: + return kAudioFormatLinearPCM + case .aac: + return kAudioFormatMPEG4AAC + case .mp3_8k: + return kAudioFormatMPEGLayer3 + default: + return 0 + } + } + + var formatFlags: AudioFormatFlags { + switch self { + case .aac: + return AudioFormatFlags(AudioSpecificConfig.AudioObjectType.aacMain.rawValue) + default: + return 0 + } + } + + var headerSize: Int { + switch self { + case .aac: + return 2 + default: + return 1 + } + } + + func audioStreamBasicDescription(_ payload: Data) -> AudioStreamBasicDescription? 
{ + guard isSupported, !payload.isEmpty else { + return nil + } + guard + let soundRate = RTMPSoundRate(rawValue: (payload[0] & 0b00001100) >> 2), + let soundType = RTMPSoundType(rawValue: (payload[0] & 0b00000001)) else { + return nil + } + return AudioStreamBasicDescription( + mSampleRate: soundRate.floatValue, + mFormatID: formatID, + mFormatFlags: formatFlags, + mBytesPerPacket: 0, + mFramesPerPacket: 1024, + mBytesPerFrame: 0, + mChannelsPerFrame: soundType == .stereo ? 2 : 1, + mBitsPerChannel: 0, + mReserved: 0 + ) + } +} + +/// The type of flv supports video frame types. +enum RTMPFrameType: UInt8 { + /// The keyframe. + case key = 1 + /// The inter frame. + case inter = 2 + /// The disposable inter frame. + case disposable = 3 + /// The generated keydrame. + case generated = 4 + /// The video info or command frame. + case command = 5 +} + +enum RTMPSoundRate: UInt8 { + /// The sound rate of 5,500.0kHz. + case kHz5_5 = 0 + /// Ths sound rate of 11,000.0kHz. + case kHz11 = 1 + /// The sound rate of 22,050.0kHz. + case kHz22 = 2 + /// Ths sound rate of 44,100.0kHz. + case kHz44 = 3 + + /// The float typed value. + var floatValue: Float64 { + switch self { + case .kHz5_5: + return 5500 + case .kHz11: + return 11025 + case .kHz22: + return 22050 + case .kHz44: + return 44100 + } + } +} + +/// The type of flv supports audio sound size. +enum RTMPSoundSize: UInt8 { + /// The 8bit sound. + case snd8bit = 0 + /// The 16bit sound. + case snd16bit = 1 +} + +/// The type of flv supports audio sound channel type.. +enum RTMPSoundType: UInt8 { + /// The mono sound. + case mono = 0 + /// The stereo sound. + case stereo = 1 +} + +/// The type of flv tag. +enum RTMPTagType: UInt8 { + /// The Audio tag, + case audio = 8 + /// The Video tag. + case video = 9 + /// The Data tag. 
+ case data = 18 + + var streamId: UInt16 { + switch self { + case .audio, .video: + return UInt16(rawValue) + case .data: + return 0 + } + } + + var headerSize: Int { + switch self { + case .audio: + return 2 + case .video: + return 5 + case .data: + return 0 + } + } +} + +/// The type of flv supports video codecs. +enum RTMPVideoCodec: UInt8 { + /// The JPEG codec. + case jpeg = 1 + /// The Sorenson H263 codec. + case sorensonH263 = 2 + /// The Screen video codec. + case screen1 = 3 + /// The On2 VP6 codec. + case on2VP6 = 4 + /// The On2 VP6 with alpha channel codec. + case on2VP6Alpha = 5 + /// The Screen video version2 codec. + case screen2 = 6 + /// The AVC codec. + case avc = 7 + /// The unknown codec. + case unknown = 0xFF + + var isSupported: Bool { + switch self { + case .jpeg: + return false + case .sorensonH263: + return false + case .screen1: + return false + case .on2VP6: + return false + case .on2VP6Alpha: + return false + case .screen2: + return false + case .avc: + return true + case .unknown: + return false + } + } +} + +extension CMSampleBuffer { + func getCompositionTime(_ offset: TimeInterval) -> Int32 { + guard decodeTimeStamp.isValid else { + return 0 + } + return max(0, Int32(((presentationTimeStamp - decodeTimeStamp).seconds + offset) * 1000)) + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPHandshake.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPHandshake.swift new file mode 100644 index 000000000..7f760598a --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPHandshake.swift @@ -0,0 +1,50 @@ +import Foundation +import HaishinKit + +final class RTMPHandshake { + static let sigSize: Int = 1536 + static let protocolVersion: UInt8 = 3 + + var timestamp: TimeInterval = 0 + + var hasS0S1Packet: Bool { + RTMPHandshake.sigSize + 1 < inputBuffer.count + } + + var hasS2Packet: Bool { + RTMPHandshake.sigSize == inputBuffer.count + } + + private var inputBuffer: Data = .init() + + 
var c0c1packet: Data { + let packet = ByteArray() + .writeUInt8(RTMPHandshake.protocolVersion) + .writeInt32(Int32(timestamp)) + .writeBytes(Data([0x00, 0x00, 0x00, 0x00])) + for _ in 0.. Data { + defer { + inputBuffer.removeSubrange(0...Self.sigSize) + } + return ByteArray() + .writeBytes(inputBuffer.subdata(in: 1..<5)) + .writeInt32(Int32(Date().timeIntervalSince1970 - timestamp)) + .writeBytes(inputBuffer.subdata(in: 9..> 4) ?? .unknown + } + + init(_ header: RTMPChunkMessageHeader) { + streamId = header.messageStreamId + timestamp = header.timestamp + payload = header.payload + } + + init?(streamId: UInt32, timestamp: UInt32, formatDescription: CMFormatDescription?) { + self.streamId = streamId + self.timestamp = timestamp + switch formatDescription?.mediaSubType { + case .opus: + guard let header = OpusHeaderPacket(formatDescription: formatDescription) else { + return nil + } + var buffer = Data([RTMPAudioCodec.exheader.rawValue << 4 | RTMPAudioPacketType.sequenceStart.rawValue]) + buffer.append(contentsOf: RTMPAudioFourCC.opus.rawValue.bigEndian.data) + buffer.append(header.payload) + self.payload = buffer + default: + guard let config = AudioSpecificConfig(formatDescription: formatDescription) else { + return nil + } + var buffer = Data([Self.aacHeader, RTMPAACPacketType.seq.rawValue]) + buffer.append(contentsOf: config.bytes) + self.payload = buffer + } + } + + init?(streamId: UInt32, timestamp: UInt32, audioBuffer: AVAudioCompressedBuffer?) 
{ + guard let audioBuffer else { + return nil + } + self.streamId = streamId + self.timestamp = timestamp + switch audioBuffer.format.formatDescription.mediaSubType { + case .opus: + var buffer = Data([RTMPAudioCodec.exheader.rawValue << 4 | RTMPAudioPacketType.codedFrames.rawValue]) + buffer.append(contentsOf: RTMPAudioFourCC.opus.rawValue.bigEndian.data) + buffer.append(audioBuffer.data.assumingMemoryBound(to: UInt8.self), count: Int(audioBuffer.byteLength)) + self.payload = buffer + default: + var buffer = Data([Self.aacHeader, RTMPAACPacketType.raw.rawValue]) + buffer.append(audioBuffer.data.assumingMemoryBound(to: UInt8.self), count: Int(audioBuffer.byteLength)) + self.payload = buffer + } + } + + func copyMemory(_ audioBuffer: AVAudioCompressedBuffer?) { + payload.withUnsafeBytes { (buffer: UnsafeRawBufferPointer) in + guard let baseAddress = buffer.baseAddress, let audioBuffer else { + return + } + let byteCount = payload.count - codec.headerSize + audioBuffer.packetDescriptions?.pointee = AudioStreamPacketDescription(mStartOffset: 0, mVariableFramesInPacket: 0, mDataByteSize: UInt32(byteCount)) + audioBuffer.packetCount = 1 + audioBuffer.byteLength = UInt32(byteCount) + audioBuffer.data.copyMemory(from: baseAddress.advanced(by: codec.headerSize), byteCount: byteCount) + } + } + + func makeAudioFormat() -> AVAudioFormat? { + switch payload[1] { + case RTMPAACPacketType.seq.rawValue: + let config = AudioSpecificConfig(bytes: [UInt8](payload[codec.headerSize..> 4 == RTMPVideoCodec.avc.rawValue + } + + var compositionTime: Int32 { + let offset = self.offset + var compositionTime = Int32(data: [0] + payload[2 + offset..<5 + offset]).bigEndian + compositionTime <<= 8 + compositionTime /= 256 + return compositionTime + } + + private var offset: Int { + return isExHeader ? packetType == RTMPVideoPacketType.codedFrames.rawValue ? 
3 : 0 : 0 + } + + init(_ header: RTMPChunkMessageHeader) { + streamId = header.messageStreamId + timestamp = header.timestamp + self.payload = header.payload + } + + init?(streamId: UInt32, timestamp: UInt32, formatDescription: CMFormatDescription?) { + guard let formatDescription else { + return nil + } + self.streamId = streamId + self.timestamp = timestamp + switch formatDescription.mediaSubType { + case .h264: + guard let configurationBox = formatDescription.configurationBox else { + return nil + } + var buffer = Data([RTMPFrameType.key.rawValue << 4 | RTMPVideoCodec.avc.rawValue, RTMPAVCPacketType.seq.rawValue, 0, 0, 0]) + buffer.append(configurationBox) + payload = buffer + case .hevc: + guard let configurationBox = formatDescription.configurationBox else { + return nil + } + var buffer = Data([0b10000000 | RTMPFrameType.key.rawValue << 4 | RTMPVideoPacketType.sequenceStart.rawValue, 0x68, 0x76, 0x63, 0x31]) + buffer.append(configurationBox) + payload = buffer + default: + return nil + } + } + + init?(streamId: UInt32, timestamp: UInt32, sampleBuffer: CMSampleBuffer?) { + guard let sampleBuffer, let data = try? sampleBuffer.dataBuffer?.dataBytes() else { + return nil + } + self.streamId = streamId + self.timestamp = timestamp + let keyframe = !sampleBuffer.isNotSync + switch sampleBuffer.formatDescription?.mediaSubType { + case .h264: + let compositionTime = sampleBuffer.getCompositionTime(Self.ctsOffset) + var buffer = Data([((keyframe ? RTMPFrameType.key.rawValue : RTMPFrameType.inter.rawValue) << 4) | RTMPVideoCodec.avc.rawValue, RTMPAVCPacketType.nal.rawValue]) + buffer.append(contentsOf: compositionTime.bigEndian.data[1..<4]) + buffer.append(data) + payload = buffer + case .hevc: + let compositionTime = sampleBuffer.getCompositionTime(Self.ctsOffset) + var buffer = Data([0b10000000 | ((keyframe ? 
RTMPFrameType.key.rawValue : RTMPFrameType.inter.rawValue) << 4) | RTMPVideoPacketType.codedFrames.rawValue, 0x68, 0x76, 0x63, 0x31]) + buffer.append(contentsOf: compositionTime.bigEndian.data[1..<4]) + buffer.append(data) + payload = buffer + default: + return nil + } + } + + func makeSampleBuffer(_ presentationTimeStamp: CMTime, formatDesciption: CMFormatDescription?) -> CMSampleBuffer? { + var sampleBuffer: CMSampleBuffer? + let blockBuffer = payload.makeBlockBuffer(advancedBy: RTMPTagType.video.headerSize + offset) + var sampleSize = blockBuffer?.dataLength ?? 0 + var timing = CMSampleTimingInfo( + duration: .invalid, + presentationTimeStamp: compositionTime == 0 ? presentationTimeStamp : CMTimeAdd(presentationTimeStamp, .init(value: CMTimeValue(compositionTime), timescale: 1000)), + decodeTimeStamp: compositionTime == 0 ? .invalid : presentationTimeStamp + ) + guard CMSampleBufferCreate( + allocator: kCFAllocatorDefault, + dataBuffer: blockBuffer, + dataReady: true, + makeDataReadyCallback: nil, + refcon: nil, + formatDescription: formatDesciption, + sampleCount: 1, + sampleTimingEntryCount: 1, + sampleTimingArray: &timing, + sampleSizeEntryCount: 1, + sampleSizeArray: &sampleSize, + sampleBufferOut: &sampleBuffer) == noErr else { + return nil + } + sampleBuffer?.isNotSync = !(payload[0] >> 4 & 0b0111 == RTMPFrameType.key.rawValue) + return sampleBuffer + } + + func makeFormatDescription() -> CMFormatDescription? { + if isExHeader { + // hevc + if payload[1] == 0x68 && payload[2] == 0x76 && payload[3] == 0x63 && payload[4] == 0x31 { + var config = HEVCDecoderConfigurationRecord() + config.data = payload.subdata(in: RTMPTagType.video.headerSize..> 4 == RTMPVideoCodec.avc.rawValue { + var config = AVCDecoderConfigurationRecord() + config.data = payload.subdata(in: RTMPTagType.video.headerSize.. 
+ + var stream: any StreamConvertible { + _stream + } + + private let uri: RTMPURL + private let mode: SessionMode + private var retryCount: Int = 0 + private var maxRetryCount = kSession_maxRetryCount + private lazy var connection: RTMPConnection = { + switch mode { + case .publish: + return RTMPConnection() + case .playback: + return RTMPConnection(flashVer: "MAC 9,0,124,2") + } + }() + private lazy var _stream: RTMPStream = { + switch mode { + case .publish: + return RTMPStream(connection: connection, fcPublishName: uri.streamName) + case .playback: + return RTMPStream(connection: connection) + } + }() + private var disconnctedTask: Task? { + didSet { + oldValue?.cancel() + } + } + + init(uri: URL, mode: SessionMode, configuration: (any SessionConfiguration)?) { + self.uri = RTMPURL(url: uri) + self.mode = mode + } + + func setMaxRetryCount(_ maxRetryCount: Int) { + self.maxRetryCount = maxRetryCount + } + + func connect(_ disconnected: @Sendable @escaping () -> Void) async throws { + guard await connection.connected == false else { + return + } + _readyState.value = .connecting + disconnctedTask = nil + // Retry handling at the TCP/IP level and during RTMP connection. + do { + _ = try await connection.connect(uri.command) + } catch { + guard retryCount < maxRetryCount else { + retryCount = 0 + _readyState.value = .closed + throw error + } + // It is being delayed using backoff for congestion control. + try await Task.sleep(nanoseconds: UInt64(pow(2.0, Double(retryCount))) * 1_000_000_000) + retryCount += 1 + try await connect(disconnected) + } + do { + retryCount = 0 + switch mode { + case .publish: + _ = try await _stream.publish(uri.streamName) + case .playback: + _ = try await _stream.play(uri.streamName) + } + _readyState.value = .open + } catch { + // Errors at the NetStream layer, such as incorrect stream names, + // cannot be resolved by retrying, so they are thrown as exceptions. 
+ try await connection.close() + _readyState.value = .closed + throw error + } + disconnctedTask = Task { + for await event in await connection.status { + switch event.code { + case RTMPConnection.Code.connectClosed.rawValue: + _readyState.value = .closed + disconnected() + default: + break + } + } + } + } + + func close() async throws { + guard await connection.connected else { + return + } + _readyState.value = .closing + disconnctedTask = nil + try await connection.close() + retryCount = 0 + _readyState.value = .closed + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPSessionFactory.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPSessionFactory.swift new file mode 100644 index 000000000..7998c5138 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPSessionFactory.swift @@ -0,0 +1,13 @@ +import Foundation +import HaishinKit + +public struct RTMPSessionFactory: SessionFactory { + public let supportedProtocols: Set = ["rtmp", "rtmps"] + + public init() { + } + + public func make(_ uri: URL, mode: SessionMode, configuration: (any SessionConfiguration)?) -> any Session { + return RTMPSession(uri: uri, mode: mode, configuration: configuration) + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPSharedObject.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPSharedObject.swift new file mode 100644 index 000000000..d8b493062 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPSharedObject.swift @@ -0,0 +1,212 @@ +import Foundation +import HaishinKit + +enum RTMPSharedObjectType: UInt8 { + case use = 1 + case release = 2 + case requestChange = 3 + case change = 4 + case success = 5 + case sendMessage = 6 + case status = 7 + case clear = 8 + case remove = 9 + case requestRemove = 10 + case useSuccess = 11 + case unknown = 255 +} + +struct RTMPSharedObjectEvent { + var type: RTMPSharedObjectType = .unknown + var name: String? 
+ var data: (any Sendable)? + + init(type: RTMPSharedObjectType) { + self.type = type + } + + init(type: RTMPSharedObjectType, name: String, data: (any Sendable)?) { + self.type = type + self.name = name + self.data = data + } + + init?(serializer: inout any AMFSerializer) throws { + guard let byte: UInt8 = try? serializer.readUInt8(), let type = RTMPSharedObjectType(rawValue: byte) else { + return nil + } + self.type = type + let length = Int(try serializer.readUInt32()) + let position: Int = serializer.position + if 0 < length { + name = try serializer.readUTF8() + switch type { + case .status: + data = try serializer.readUTF8() + default: + if serializer.position - position < length { + data = try serializer.deserialize() + } + } + } + } + + func serialize(_ serializer: inout any AMFSerializer) { + serializer.writeUInt8(type.rawValue) + guard let name: String = name else { + serializer.writeUInt32(0) + return + } + let position: Int = serializer.position + serializer + .writeUInt32(0) + .writeUInt16(UInt16(name.utf8.count)) + .writeUTF8Bytes(name) + .serialize(data) + let size: Int = serializer.position - position + serializer.position = position + serializer.writeUInt32(UInt32(size) - 4) + let length = serializer.length + serializer.position = length + } +} + +extension RTMPSharedObjectEvent: CustomDebugStringConvertible { + // MARK: CustomDebugStringConvertible + var debugDescription: String { + Mirror(reflecting: self).debugDescription + } +} + +// MARK: - +/// The RTMPSharedObject class is used to read and write data on a server. +public actor RTMPSharedObject { + private static nonisolated(unsafe) var remoteSharedObjects: HKAtomic<[String: RTMPSharedObject]> = .init([:]) + + /// Returns a reference to a shared object on a server. 
+ public static func getRemote(withName: String, remotePath: String, persistence: Bool) -> RTMPSharedObject { + let key = remotePath + "/" + withName + "?persistence=" + persistence.description + guard let sharedObject = remoteSharedObjects.value[key] else { + let sharedObject = RTMPSharedObject(name: withName, path: remotePath, persistence: persistence) + remoteSharedObjects.mutate { $0[key] = sharedObject } + return sharedObject + } + return sharedObject + } + + /// The AMF object encoding type. + public let objectEncoding = RTMPConnection.defaultObjectEncoding + /// The current data storage. + public private(set) var data = AMFObject() + + private var succeeded = false { + didSet { + guard succeeded else { + return + } + Task { + for (key, value) in data { + await setProperty(key, value) + } + } + } + } + + let name: String + let path: String + var timestamp: TimeInterval = 0 + let persistence: Bool + var currentVersion: UInt32 = 0 + private var connection: RTMPConnection? + + init(name: String, path: String, persistence: Bool) { + self.name = name + self.path = path + self.persistence = persistence + } + + /// Updates the value of a property in shared object. + public func setProperty(_ name: String, _ value: (any Sendable)?) async { + data[name] = value + guard let connection, succeeded else { + return + } + await connection.doOutput(.one, chunkStreamId: .command, message: makeMessage([RTMPSharedObjectEvent(type: .requestChange, name: name, data: value)])) + } + + /// Connects to a remove shared object on a server. + public func connect(_ rtmpConnection: RTMPConnection) async { + if self.connection != nil { + await close() + } + self.connection = rtmpConnection + if await rtmpConnection.connected { + await rtmpConnection.doOutput(.zero, chunkStreamId: .command, message: makeMessage([RTMPSharedObjectEvent(type: .use)])) + } + } + + /// Purges all of the data. 
+ public func clear() async { + data.removeAll(keepingCapacity: false) + await connection?.doOutput(.one, chunkStreamId: .command, message: makeMessage([RTMPSharedObjectEvent(type: .clear)])) + } + + /// Closes the connection a server. + public func close() async { + data.removeAll(keepingCapacity: false) + await connection?.doOutput(.one, chunkStreamId: .command, message: makeMessage([RTMPSharedObjectEvent(type: .release)])) + connection = nil + } + + final func on(message: RTMPSharedObjectMessage) { + currentVersion = message.currentVersion + var changeList: [AMFObject] = [] + for event in message.events { + var change: AMFObject = [ + "code": "", + "name": event.name, + "oldValue": nil + ] + switch event.type { + case .change: + change["code"] = "change" + change["oldValue"] = data.removeValue(forKey: event.name!) + data[event.name!] = event.data + case .success: + change["code"] = "success" + case .status: + change["code"] = "reject" + change["oldValue"] = data.removeValue(forKey: event.name!) + case .clear: + data.removeAll(keepingCapacity: false) + change["code"] = "clear" + case .remove: + change["code"] = "delete" + case .useSuccess: + succeeded = true + continue + default: + continue + } + changeList.append(change) + } + } + + private func makeMessage(_ events: [RTMPSharedObjectEvent]) -> RTMPSharedObjectMessage { + let now = Date() + let timestamp: TimeInterval = now.timeIntervalSince1970 - self.timestamp + self.timestamp = now.timeIntervalSince1970 + defer { + currentVersion += 1 + } + return RTMPSharedObjectMessage( + timestamp: UInt32(timestamp * 1000), + streamId: 0, + objectEncoding: objectEncoding, + sharedObjectName: name, + currentVersion: succeeded ? 0 : currentVersion, + flags: Data([0x00, 0x00, 0x00, persistence ? 
0x02 : 0x00, 0x00, 0x00, 0x00, 0x00]), + events: events + ) + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPSocket.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPSocket.swift new file mode 100644 index 000000000..fe8360030 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPSocket.swift @@ -0,0 +1,221 @@ +import Foundation +import HaishinKit +import Network + +final actor RTMPSocket { + static let defaultWindowSizeC = Int(UInt8.max) + + enum Error: Swift.Error { + case invalidState + case endOfStream + case connectionTimedOut + case connectionNotEstablished(_ error: NWError?) + } + + private var timeout: UInt64 = 15 + private var connected = false + private var windowSizeC = RTMPSocket.defaultWindowSizeC + private var securityLevel: StreamSocketSecurityLevel = .none + private var totalBytesIn = 0 + private var queueBytesOut = 0 + private var totalBytesOut = 0 + private var parameters: NWParameters = .tcp + private var connection: NWConnection? { + didSet { + oldValue?.viabilityUpdateHandler = nil + oldValue?.stateUpdateHandler = nil + oldValue?.forceCancel() + } + } + private var outputs: AsyncStream.Continuation? { + didSet { + oldValue?.finish() + } + } + private var qualityOfService: DispatchQoS = .userInitiated + private var continuation: CheckedContinuation? 
+ private lazy var networkQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.RTMPSocket.network", qos: qualityOfService) + + init() { + } + + init(qualityOfService: DispatchQoS, securityLevel: StreamSocketSecurityLevel) { + self.qualityOfService = qualityOfService + switch securityLevel { + case .ssLv2, .ssLv3, .tlSv1, .negotiatedSSL: + parameters = .tls + default: + parameters = .tcp + } + } + + func connect(_ name: String, port: Int) async throws { + guard !connected else { + throw Error.invalidState + } + totalBytesIn = 0 + totalBytesOut = 0 + queueBytesOut = 0 + do { + let connection = NWConnection(to: NWEndpoint.hostPort(host: .init(name), port: .init(integerLiteral: NWEndpoint.Port.IntegerLiteralType(port))), using: parameters) + self.connection = connection + try await withCheckedThrowingContinuation { (checkedContinuation: CheckedContinuation) in + self.continuation = checkedContinuation + Task { + try? await Task.sleep(nanoseconds: timeout * 1_000_000_000) + guard let continuation else { + return + } + continuation.resume(throwing: Error.connectionTimedOut) + self.continuation = nil + close() + } + connection.stateUpdateHandler = { state in + Task { await self.stateDidChange(to: state) } + } + connection.viabilityUpdateHandler = { viability in + Task { await self.viabilityDidChange(to: viability) } + } + connection.start(queue: networkQueue) + } + } catch { + throw error + } + } + + func send(_ data: Data) { + guard connected else { + return + } + queueBytesOut += data.count + outputs?.yield(data) + } + + func send(_ iterator: AnyIterator) { + guard connected else { + return + } + for data in iterator { + queueBytesOut += data.count + outputs?.yield(data) + } + } + + func recv() -> AsyncStream { + AsyncStream { continuation in + Task { + do { + while connected { + let data = try await recv() + continuation.yield(data) + totalBytesIn += data.count + } + } catch { + continuation.finish() + } + } + } + } + + func close(_ error: NWError? 
= nil) { + guard connection != nil else { + return + } + if let continuation { + continuation.resume(throwing: Error.connectionNotEstablished(error)) + self.continuation = nil + } + connected = false + outputs = nil + connection = nil + continuation = nil + } + + private func stateDidChange(to state: NWConnection.State) { + switch state { + case .ready: + logger.info("Connection is ready.") + connected = true + let (stream, continuation) = AsyncStream.makeStream() + Task { + for await data in stream where connected { + try await send(data) + totalBytesOut += data.count + queueBytesOut -= data.count + } + } + self.outputs = continuation + self.continuation?.resume() + self.continuation = nil + case .waiting(let error): + logger.warn("Connection waiting:", error) + close(error) + case .setup: + logger.debug("Connection is setting up.") + case .preparing: + logger.debug("Connection is preparing.") + case .failed(let error): + logger.warn("Connection failed:", error) + close(error) + case .cancelled: + logger.info("Connection cancelled.") + close() + @unknown default: + logger.error("Unknown connection state.") + } + } + + private func viabilityDidChange(to viability: Bool) { + logger.info("Connection viability changed to ", viability) + if viability == false { + close() + } + } + + private func send(_ data: Data) async throws { + return try await withCheckedThrowingContinuation { continuation in + guard let connection else { + continuation.resume(throwing: Error.invalidState) + return + } + connection.send(content: data, completion: .contentProcessed { error in + if let error { + continuation.resume(throwing: error) + return + } + continuation.resume() + }) + } + } + + private func recv() async throws -> Data { + return try await withCheckedThrowingContinuation { continuation in + guard let connection else { + continuation.resume(throwing: Error.invalidState) + return + } + connection.receive(minimumIncompleteLength: 0, maximumLength: windowSizeC) { content, _, _, 
error in + if let error { + continuation.resume(throwing: error) + return + } + if let content { + continuation.resume(returning: content) + } else { + continuation.resume(throwing: Error.endOfStream) + } + } + } + } +} + +extension RTMPSocket: NetworkTransportReporter { + // MARK: NetworkTransportReporter + func makeNetworkMonitor() async -> NetworkMonitor { + return .init(self) + } + + func makeNetworkTransportReport() -> NetworkTransportReport { + return .init(queueBytesOut: queueBytesOut, totalBytesIn: totalBytesIn, totalBytesOut: totalBytesOut) + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPStatus.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPStatus.swift new file mode 100644 index 000000000..395c6b8f4 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPStatus.swift @@ -0,0 +1,48 @@ +import Foundation + +/// A struct that represents a it reports its rtmp status. +@dynamicMemberLookup +public struct RTMPStatus: Sendable { + /// The string that represents a specific event. + public let code: String + /// The string that is either "status" or "error". + public let level: String + /// The string that is code description. + public let description: String + + private let data: AMFObject? + + init?(_ data: AMFObject?) { + guard + let data, + let code = data["code"] as? String, + let level = data["level"] as? String else { + return nil + } + self.data = data + self.code = code + self.level = level + self.description = (data["description"] as? String) ?? "" + } + + init(code: String, level: String, description: String) { + self.code = code + self.level = level + self.description = description + self.data = nil + } + + public subscript(dynamicMember key: String) -> String? { + guard let value = data?[key] as? String else { + return nil + } + return value + } + + public subscript(dynamicMember key: String) -> Double? { + guard let value = data?[key] as? 
Double else { + return nil + } + return value + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPStream.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPStream.swift new file mode 100644 index 000000000..88073111a --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPStream.swift @@ -0,0 +1,815 @@ +@preconcurrency import AVFAudio +import AVFoundation +import Combine +import HaishinKit + +#if canImport(UIKit) +import UIKit +typealias View = UIView +#endif + +#if canImport(AppKit) && !targetEnvironment(macCatalyst) +import AppKit +typealias View = NSView +#endif + +/// An object that provides the interface to control a one-way channel over an RTMPConnection. +public actor RTMPStream { + /// The error domain code. + public enum Error: Swift.Error { + /// An invalid internal stare. + case invalidState + /// The requested operation timed out. + case requestTimedOut + /// A request fails. + case requestFailed(response: RTMPResponse) + /// An unsupported codec. 
+ case unsupportedCodec + } + + /// NetStatusEvent#info.code for NetStream + /// - seealso: https://help.adobe.com/en_US/air/reference/html/flash/events/NetStatusEvent.html#NET_STATUS + public enum Code: String { + case bufferEmpty = "NetStream.Buffer.Empty" + case bufferFlush = "NetStream.Buffer.Flush" + case bufferFull = "NetStream.Buffer.Full" + case connectClosed = "NetStream.Connect.Closed" + case connectFailed = "NetStream.Connect.Failed" + case connectRejected = "NetStream.Connect.Rejected" + case connectSuccess = "NetStream.Connect.Success" + case drmUpdateNeeded = "NetStream.DRM.UpdateNeeded" + case failed = "NetStream.Failed" + case multicastStreamReset = "NetStream.MulticastStream.Reset" + case pauseNotify = "NetStream.Pause.Notify" + case playFailed = "NetStream.Play.Failed" + case playFileStructureInvalid = "NetStream.Play.FileStructureInvalid" + case playInsufficientBW = "NetStream.Play.InsufficientBW" + case playNoSupportedTrackFound = "NetStream.Play.NoSupportedTrackFound" + case playReset = "NetStream.Play.Reset" + case playStart = "NetStream.Play.Start" + case playStop = "NetStream.Play.Stop" + case playStreamNotFound = "NetStream.Play.StreamNotFound" + case playTransition = "NetStream.Play.Transition" + case playUnpublishNotify = "NetStream.Play.UnpublishNotify" + case publishBadName = "NetStream.Publish.BadName" + case publishIdle = "NetStream.Publish.Idle" + case publishStart = "NetStream.Publish.Start" + case recordAlreadyExists = "NetStream.Record.AlreadyExists" + case recordFailed = "NetStream.Record.Failed" + case recordNoAccess = "NetStream.Record.NoAccess" + case recordStart = "NetStream.Record.Start" + case recordStop = "NetStream.Record.Stop" + case recordDiskQuotaExceeded = "NetStream.Record.DiskQuotaExceeded" + case secondScreenStart = "NetStream.SecondScreen.Start" + case secondScreenStop = "NetStream.SecondScreen.Stop" + case seekFailed = "NetStream.Seek.Failed" + case seekInvalidTime = "NetStream.Seek.InvalidTime" + case seekNotify 
= "NetStream.Seek.Notify" + case stepNotify = "NetStream.Step.Notify" + case unpauseNotify = "NetStream.Unpause.Notify" + case unpublishSuccess = "NetStream.Unpublish.Success" + case videoDimensionChange = "NetStream.Video.DimensionChange" + + public var level: String { + switch self { + case .bufferEmpty: + return "status" + case .bufferFlush: + return "status" + case .bufferFull: + return "status" + case .connectClosed: + return "status" + case .connectFailed: + return "error" + case .connectRejected: + return "error" + case .connectSuccess: + return "status" + case .drmUpdateNeeded: + return "status" + case .failed: + return "error" + case .multicastStreamReset: + return "status" + case .pauseNotify: + return "status" + case .playFailed: + return "error" + case .playFileStructureInvalid: + return "error" + case .playInsufficientBW: + return "warning" + case .playNoSupportedTrackFound: + return "status" + case .playReset: + return "status" + case .playStart: + return "status" + case .playStop: + return "status" + case .playStreamNotFound: + return "error" + case .playTransition: + return "status" + case .playUnpublishNotify: + return "status" + case .publishBadName: + return "error" + case .publishIdle: + return "status" + case .publishStart: + return "status" + case .recordAlreadyExists: + return "status" + case .recordFailed: + return "error" + case .recordNoAccess: + return "error" + case .recordStart: + return "status" + case .recordStop: + return "status" + case .recordDiskQuotaExceeded: + return "error" + case .secondScreenStart: + return "status" + case .secondScreenStop: + return "status" + case .seekFailed: + return "error" + case .seekInvalidTime: + return "error" + case .seekNotify: + return "status" + case .stepNotify: + return "status" + case .unpauseNotify: + return "status" + case .unpublishSuccess: + return "status" + case .videoDimensionChange: + return "status" + } + } + + func status(_ description: String) -> RTMPStatus { + return .init(code: 
rawValue, level: level, description: description) + } + } + + /// The type of publish options. + public enum HowToPublish: String, Sendable { + /// Publish with server-side recording. + case record + /// Publish with server-side recording which is to append file if exists. + case append + /// Publish with server-side recording which is to append and ajust time file if exists. + case appendWithGap + /// Publish. + case live + } + + static let defaultID: UInt32 = 0 + static let supportedAudioCodecs: [AudioCodecSettings.Format] = [.aac, .opus] + static let supportedVideoCodecs: [VideoCodecSettings.Format] = VideoCodecSettings.Format.allCases + + /// The RTMPStream metadata. + public private(set) var metadata: AMFArray = .init(count: 0) + /// The RTMPStreamInfo object whose properties contain data. + public private(set) var info = RTMPStreamInfo() + /// The object encoding (AMF). Framework supports AMF0 only. + public private(set) var objectEncoding = RTMPConnection.defaultObjectEncoding + /// The boolean value that indicates audio samples allow access or not. + public private(set) var audioSampleAccess = true + /// The boolean value that indicates video samples allow access or not. + public private(set) var videoSampleAccess = true + /// The number of video frames per seconds. + @Published public private(set) var currentFPS: UInt16 = 0 + /// The ready state of stream. + @Published public private(set) var readyState: StreamReadyState = .idle + /// The stream of events you receive RTMP status events from a service. + public var status: AsyncStream { + AsyncStream { continuation in + statusContinuation = continuation + } + } + /// The stream's name used for FMLE-compatible sequences. + public private(set) var fcPublishName: String? + + public private(set) var videoTrackId: UInt8? = UInt8.max + public private(set) var audioTrackId: UInt8? 
= UInt8.max + + private var isPaused = false + private var startedAt = Date() { + didSet { + dataTimestamps.removeAll() + } + } + package var outputs: [any StreamOutput] = [] + private var frameCount: UInt16 = 0 + private var audioBuffer: AVAudioCompressedBuffer? + private var howToPublish: RTMPStream.HowToPublish = .live + private var continuation: CheckedContinuation? { + didSet { + if continuation == nil { + expectedResponse = nil + } + } + } + private var dataTimestamps: [String: Date] = .init() + private var audioTimestamp: RTMPTimestamp = .init() + private var videoTimestamp: RTMPTimestamp = .init() + private var requestTimeout = RTMPConnection.defaultRequestTimeout + private var expectedResponse: Code? + package var bitRateStrategy: (any StreamBitRateStrategy)? + private var statusContinuation: AsyncStream.Continuation? + private(set) var id: UInt32 = RTMPStream.defaultID + package lazy var incoming = IncomingStream(self) + package lazy var outgoing = OutgoingStream() + private weak var connection: RTMPConnection? + + private var audioFormat: AVAudioFormat? { + didSet { + guard audioFormat != oldValue else { + return + } + switch readyState { + case .publishing: + guard let message = RTMPAudioMessage(streamId: id, timestamp: 0, formatDescription: audioFormat?.formatDescription) else { + return + } + doOutput(oldValue == nil ? .zero : .one, chunkStreamId: .audio, message: message) + case .playing: + if let audioFormat { + audioBuffer = AVAudioCompressedBuffer(format: audioFormat, packetCapacity: 1, maximumPacketSize: 1024 * Int(audioFormat.channelCount)) + } else { + audioBuffer = nil + } + default: + break + } + } + } + + private var videoFormat: CMFormatDescription? { + didSet { + guard videoFormat != oldValue else { + return + } + switch readyState { + case .publishing: + guard let message = RTMPVideoMessage(streamId: id, timestamp: 0, formatDescription: videoFormat) else { + return + } + doOutput(oldValue == nil ? 
.zero : .one, chunkStreamId: .video, message: message) + default: + break + } + } + } + + /// Creates a new stream. + public init(connection: RTMPConnection, fcPublishName: String? = nil) { + self.connection = connection + self.fcPublishName = fcPublishName + self.requestTimeout = connection.requestTimeout + Task { + await connection.addStream(self) + if await connection.connected { + await createStream() + } + } + } + + deinit { + outputs.removeAll() + } + + /// Plays a live stream from a server. + public func play(_ arguments: (any Sendable)?...) async throws -> RTMPResponse { + guard let name = arguments.first as? String else { + switch readyState { + case .playing: + info.resourceName = nil + return try await close() + default: + throw Error.invalidState + } + } + do { + audioFormat = nil + videoFormat = nil + let response = try await withCheckedThrowingContinuation { continuation in + readyState = .play + expectedResponse = Code.playStart + self.continuation = continuation + Task { + await incoming.startRunning() + try? await Task.sleep(nanoseconds: requestTimeout * 1_000_000) + self.continuation.map { + $0.resume(throwing: Error.requestTimedOut) + } + self.continuation = nil + } + doOutput(.zero, chunkStreamId: .command, message: RTMPCommandMessage( + streamId: id, + transactionId: 0, + objectEncoding: objectEncoding, + commandName: "play", + commandObject: nil, + arguments: arguments + )) + } + startedAt = .init() + readyState = .playing + info.resourceName = name + return response + } catch { + Task { await incoming.stopRunning() } + outgoing.stopRunning() + readyState = .idle + throw error + } + } + + /// Seeks the keyframe. 
+ public func seek(_ offset: Double) async throws { + guard readyState == .playing else { + throw Error.invalidState + } + doOutput(.zero, chunkStreamId: .command, message: RTMPCommandMessage( + streamId: id, + transactionId: 0, + objectEncoding: objectEncoding, + commandName: "seek", + commandObject: nil, + arguments: [offset] + )) + } + + /// Sends streaming audio, vidoe and data message from client. + public func publish(_ name: String?, type: RTMPStream.HowToPublish = .live) async throws -> RTMPResponse { + guard let name else { + switch readyState { + case .publishing: + return try await close() + default: + throw Error.invalidState + } + } + do { + audioFormat = nil + videoFormat = nil + let response = try await withCheckedThrowingContinuation { continuation in + readyState = .publish + expectedResponse = Code.publishStart + self.continuation = continuation + Task { + try? await Task.sleep(nanoseconds: requestTimeout * 1_000_000) + self.continuation.map { + $0.resume(throwing: Error.requestTimedOut) + } + self.continuation = nil + } + doOutput(.zero, chunkStreamId: .command, message: RTMPCommandMessage( + streamId: id, + transactionId: 0, + objectEncoding: objectEncoding, + commandName: "publish", + commandObject: nil, + arguments: [name, type.rawValue] + )) + } + info.resourceName = name + howToPublish = type + startedAt = .init() + metadata = makeMetadata() + readyState = .publishing + try? send("@setDataFrame", arguments: "onMetaData", metadata) + outgoing.startRunning() + Task { + for await audio in outgoing.audioOutputStream { + append(audio.0, when: audio.1) + } + } + Task { + for await video in outgoing.videoOutputStream { + append(video) + } + } + Task { + for await video in outgoing.videoInputStream { + outgoing.append(video: video) + } + } + return response + } catch { + readyState = .idle + throw error + } + } + + /// Stops playing or publishing and makes available other uses. 
+ public func close() async throws -> RTMPResponse { + guard readyState == .playing || readyState == .publishing else { + throw Error.invalidState + } + outgoing.stopRunning() + return try await withCheckedThrowingContinuation { continutation in + self.continuation = continutation + switch readyState { + case .playing: + expectedResponse = Code.playStop + case .publishing: + expectedResponse = Code.unpublishSuccess + default: + break + } + Task { + await incoming.stopRunning() + try? await Task.sleep(nanoseconds: requestTimeout * 1_000_000) + self.continuation.map { + $0.resume(throwing: Error.requestTimedOut) + } + self.continuation = nil + } + doOutput(.zero, chunkStreamId: .command, message: RTMPCommandMessage( + streamId: id, + transactionId: 0, + objectEncoding: objectEncoding, + commandName: "closeStream", + commandObject: nil, + arguments: [] + )) + readyState = .idle + } + } + + /// Sends a message on a published stream to all subscribing clients. + /// + /// ``` + /// // To add a metadata to a live stream sent to an RTMP Service. + /// stream.send("@setDataFrame", "onMetaData", metaData) + /// // To clear a metadata that has already been set in the stream. + /// stream.send("@clearDataFrame", "onMetaData"); + /// ``` + /// + /// - Parameters: + /// - handlerName: The message to send. + /// - arguments: Optional arguments. + /// - isResetTimestamp: A workaround option for sending timestamps as 0 in some services. + public func send(_ handlerName: String, arguments: (any Sendable)?..., isResetTimestamp: Bool = false) throws { + guard readyState == .publishing else { + throw Error.invalidState + } + if isResetTimestamp { + dataTimestamps[handlerName] = nil + } + let dataWasSent = dataTimestamps[handlerName] == nil ? false : true + let timestmap: UInt32 = dataWasSent ? UInt32((dataTimestamps[handlerName]?.timeIntervalSinceNow ?? 0) * -1000) : UInt32(startedAt.timeIntervalSinceNow * -1000) + doOutput( + dataWasSent ? 
RTMPChunkType.one : RTMPChunkType.zero, + chunkStreamId: .data, + message: RTMPDataMessage( + streamId: id, + objectEncoding: objectEncoding, + timestamp: timestmap, + handlerName: handlerName, + arguments: arguments + ) + ) + dataTimestamps[handlerName] = .init() + } + + /// Incoming audio plays on a stream or not. + public func receiveAudio(_ receiveAudio: Bool) async throws { + guard readyState == .playing else { + throw Error.invalidState + } + doOutput(.zero, chunkStreamId: .command, message: RTMPCommandMessage( + streamId: id, + transactionId: 0, + objectEncoding: objectEncoding, + commandName: "receiveAudio", + commandObject: nil, + arguments: [receiveAudio] + )) + } + + /// Incoming video plays on a stream or not. + public func receiveVideo(_ receiveVideo: Bool) async throws { + guard readyState == .playing else { + throw Error.invalidState + } + doOutput(.zero, chunkStreamId: .command, message: RTMPCommandMessage( + streamId: id, + transactionId: 0, + objectEncoding: objectEncoding, + commandName: "receiveVideo", + commandObject: nil, + arguments: [receiveVideo] + )) + } + + /// Pauses playback a stream or not. + public func pause(_ paused: Bool) async throws -> RTMPResponse { + guard readyState == .playing else { + throw Error.invalidState + } + let response = try await withCheckedThrowingContinuation { continuation in + expectedResponse = isPaused ? Code.pauseNotify : Code.unpauseNotify + self.continuation = continuation + Task { + try? await Task.sleep(nanoseconds: requestTimeout * 1_000_000) + self.continuation.map { + $0.resume(throwing: Error.requestTimedOut) + } + self.continuation = nil + } + doOutput(.zero, chunkStreamId: .command, message: RTMPCommandMessage( + streamId: id, + transactionId: 0, + objectEncoding: objectEncoding, + commandName: "pause", + commandObject: nil, + arguments: [paused, floor(startedAt.timeIntervalSinceNow * -1000)] + )) + } + isPaused = paused + return response + } + + /// Pauses or resumes playback of a stream. 
+ public func togglePause() async throws -> RTMPResponse { + try await pause(!isPaused) + } + + func doOutput(_ type: RTMPChunkType, chunkStreamId: RTMPChunkStreamId, message: some RTMPMessage) { + Task { + let length = await connection?.doOutput(type, chunkStreamId: chunkStreamId, message: message) ?? 0 + info.byteCount += length + } + } + + func dispatch(_ message: some RTMPMessage, type: RTMPChunkType) { + info.byteCount += message.payload.count + switch message { + case let message as RTMPCommandMessage: + let response = RTMPResponse(message) + switch message.commandName { + case "onStatus": + switch response.status?.level { + case "status": + // During playback, only NetStream.Play.Start is awaited, as it follows the next sequence. + // 1. NetStream.Play.Rest + // 2. NetStream.Play.Start + if let code = response.status?.code, expectedResponse?.rawValue == code { + continuation?.resume(returning: response) + continuation = nil + } + default: + continuation?.resume(throwing: Error.requestFailed(response: response)) + continuation = nil + } + _ = response.status.map { + statusContinuation?.yield($0) + } + default: + logger.info(message) + } + case let message as RTMPAudioMessage: + append(message, type: type) + case let message as RTMPVideoMessage: + append(message, type: type) + case let message as RTMPDataMessage: + switch message.handlerName { + case "onMetaData": + metadata = message.arguments[0] as? AMFArray ?? .init(count: 0) + case "|RtmpSampleAccess": + audioSampleAccess = message.arguments[0] as? Bool ?? true + videoSampleAccess = message.arguments[1] as? Bool ?? 
true + default: + break + } + case let message as RTMPUserControlMessage: + switch message.event { + case .bufferEmpty: + statusContinuation?.yield(Code.bufferEmpty.status("")) + case .bufferFull: + statusContinuation?.yield(Code.bufferFull.status("")) + default: + break + } + default: + break + } + } + + func createStream() async { + if let fcPublishName { + // FMLE-compatible sequences + async let _ = connection?.call("releaseStream", arguments: fcPublishName) + async let _ = connection?.call("FCPublish", arguments: fcPublishName) + } + do { + let response = try await connection?.call("createStream") + guard let first = response?.arguments.first as? Double else { + return + } + id = UInt32(first) + readyState = .idle + } catch { + logger.error(error) + } + } + + func deleteStream() async { + guard let fcPublishName, readyState == .publishing else { + return + } + outgoing.stopRunning() + async let _ = try? connection?.call("FCUnpublish", arguments: fcPublishName) + async let _ = try? connection?.call("deleteStream", arguments: id) + } + + private func append(_ message: RTMPAudioMessage, type: RTMPChunkType) { + audioTimestamp.update(message, chunkType: type) + guard message.codec.isSupported else { + return + } + switch message.payload[1] { + case RTMPAACPacketType.seq.rawValue: + audioFormat = message.makeAudioFormat() + case RTMPAACPacketType.raw.rawValue: + if audioFormat == nil { + audioFormat = message.makeAudioFormat() + } + if let audioBuffer { + message.copyMemory(audioBuffer) + Task { await incoming.append(audioBuffer, when: audioTimestamp.value) } + } + default: + break + } + } + + private func append(_ message: RTMPVideoMessage, type: RTMPChunkType) { + videoTimestamp.update(message, chunkType: type) + guard RTMPTagType.video.headerSize <= message.payload.count && message.isSupported else { + return + } + if message.isExHeader { + // IsExHeader for Enhancing RTMP, FLV + switch message.packetType { + case RTMPVideoPacketType.sequenceStart.rawValue: + 
videoFormat = message.makeFormatDescription() + case RTMPVideoPacketType.codedFrames.rawValue: + Task { await incoming.append(message, presentationTimeStamp: videoTimestamp.value, formatDesciption: videoFormat) } + case RTMPVideoPacketType.codedFramesX.rawValue: + Task { await incoming.append(message, presentationTimeStamp: videoTimestamp.value, formatDesciption: videoFormat) } + default: + break + } + } else { + switch message.packetType { + case RTMPAVCPacketType.seq.rawValue: + videoFormat = message.makeFormatDescription() + case RTMPAVCPacketType.nal.rawValue: + Task { await incoming.append(message, presentationTimeStamp: videoTimestamp.value, formatDesciption: videoFormat) } + default: + break + } + } + } + + /// Creates flv metadata for a stream. + private func makeMetadata() -> AMFArray { + // https://github.com/shogo4405/HaishinKit.swift/issues/1410 + var metadata: AMFObject = ["duration": 0] + if outgoing.videoInputFormat != nil { + metadata["width"] = outgoing.videoSettings.videoSize.width + metadata["height"] = outgoing.videoSettings.videoSize.height + metadata["videocodecid"] = outgoing.videoSettings.format.codecid + metadata["videodatarate"] = outgoing.videoSettings.bitRate / 1000 + if let expectedFrameRate = outgoing.videoSettings.expectedFrameRate { + metadata["framerate"] = expectedFrameRate + } + } + if let audioFormat = outgoing.audioInputFormat?.audioStreamBasicDescription { + metadata["audiocodecid"] = outgoing.audioSettings.format.codecid + metadata["audiodatarate"] = outgoing.audioSettings.bitRate / 1000 + metadata["audiosamplerate"] = outgoing.audioSettings.format.makeSampleRate( + audioFormat.mSampleRate, + output: outgoing.audioSettings.sampleRate + ) + } + return AMFArray(metadata) + } +} + +extension RTMPStream: _Stream { + public func setAudioSettings(_ audioSettings: AudioCodecSettings) throws { + guard Self.supportedAudioCodecs.contains(audioSettings.format) else { + throw Error.unsupportedCodec + } + outgoing.audioSettings = 
audioSettings + } + + public func setVideoSettings(_ videoSettings: VideoCodecSettings) throws { + guard Self.supportedVideoCodecs.contains(videoSettings.format) else { + throw Error.unsupportedCodec + } + outgoing.videoSettings = videoSettings + } + + public func append(_ sampleBuffer: CMSampleBuffer) { + switch sampleBuffer.formatDescription?.mediaType { + case .video: + if sampleBuffer.formatDescription?.isCompressed == true { + do { + let decodeTimeStamp = sampleBuffer.decodeTimeStamp.isValid ? sampleBuffer.decodeTimeStamp : sampleBuffer.presentationTimeStamp + let timedelta = try videoTimestamp.update(decodeTimeStamp) + frameCount += 1 + videoFormat = sampleBuffer.formatDescription + guard let message = RTMPVideoMessage(streamId: id, timestamp: timedelta, sampleBuffer: sampleBuffer) else { + return + } + doOutput(.one, chunkStreamId: .video, message: message) + } catch { + logger.warn(error) + } + } else { + outgoing.append(sampleBuffer) + if sampleBuffer.formatDescription?.isCompressed == false { + outputs.forEach { + switch sampleBuffer.formatDescription?.mediaType { + case .audio: + if audioSampleAccess { + $0.stream(self, didOutput: sampleBuffer) + } + case .video: + if videoSampleAccess || ($0 is View) { + $0.stream(self, didOutput: sampleBuffer) + } + default: + $0.stream(self, didOutput: sampleBuffer) + } + } + } + } + default: + break + } + } + + public func append(_ audioBuffer: AVAudioBuffer, when: AVAudioTime) { + switch audioBuffer { + case let audioBuffer as AVAudioCompressedBuffer: + do { + let timedelta = try audioTimestamp.update(when) + audioFormat = audioBuffer.format + guard let message = RTMPAudioMessage(streamId: id, timestamp: timedelta, audioBuffer: audioBuffer) else { + return + } + doOutput(.one, chunkStreamId: .audio, message: message) + } catch { + logger.warn(error) + } + default: + outgoing.append(audioBuffer, when: when) + if audioBuffer is AVAudioPCMBuffer && audioSampleAccess { + outputs.forEach { $0.stream(self, didOutput: 
audioBuffer, when: when) } + } + } + } + + public func dispatch(_ event: NetworkMonitorEvent) async { + await bitRateStrategy?.adjustBitrate(event, stream: self) + currentFPS = frameCount + frameCount = 0 + info.update() + } +} + +extension RTMPStream: MediaMixerOutput { + // MARK: MediaMixerOutput + public func selectTrack(_ id: UInt8?, mediaType: CMFormatDescription.MediaType) { + switch mediaType { + case .audio: + audioTrackId = id + case .video: + videoTrackId = id + default: + break + } + } + + nonisolated public func mixer(_ mixer: MediaMixer, didOutput sampleBuffer: CMSampleBuffer) { + Task { await append(sampleBuffer) } + } + + nonisolated public func mixer(_ mixer: MediaMixer, didOutput buffer: AVAudioPCMBuffer, when: AVAudioTime) { + Task { await append(buffer, when: when) } + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPStreamInfo.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPStreamInfo.swift new file mode 100644 index 000000000..fc1103559 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPStreamInfo.swift @@ -0,0 +1,30 @@ +import Foundation + +/// A structor that provides the statistics related to the RTMPStream. +public struct RTMPStreamInfo: Sendable { + /// The number of bytes received by the RTMPStream. + public internal(set) var byteCount = 0 + /// The resource name of a stream. + public internal(set) var resourceName: String? + /// The number of bytes received per second by the RTMPStream. 
+ public internal(set) var currentBytesPerSecond = 0 + private var previousByteCount = 0 + + mutating func update() { + currentBytesPerSecond = byteCount - previousByteCount + previousByteCount = byteCount + } + + mutating func clear() { + byteCount = 0 + currentBytesPerSecond = 0 + previousByteCount = 0 + } +} + +extension RTMPStreamInfo: CustomDebugStringConvertible { + // MARK: CustomDebugStringConvertible + public var debugDescription: String { + Mirror(reflecting: self).debugDescription + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPTimestamp.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPTimestamp.swift new file mode 100644 index 000000000..071e110e2 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPTimestamp.swift @@ -0,0 +1,79 @@ +import AVFoundation +import CoreMedia +import Foundation + +protocol RTMPTimeConvertible { + var seconds: TimeInterval { get } +} + +private let kRTMPTimestamp_defaultTimeInterval: TimeInterval = 0 + +struct RTMPTimestamp { + enum Error: Swift.Error { + case invalidSequence + } + + private var startedAt = kRTMPTimestamp_defaultTimeInterval + private var updatedAt = kRTMPTimestamp_defaultTimeInterval + private var timedeltaFraction: TimeInterval = kRTMPTimestamp_defaultTimeInterval + + mutating func update(_ value: T) throws -> UInt32 { + guard updatedAt < value.seconds else { + throw Error.invalidSequence + } + if startedAt == 0 { + startedAt = value.seconds + updatedAt = value.seconds + return 0 + } + var timedelta = (value.seconds - updatedAt) * 1000 + timedeltaFraction += timedelta.truncatingRemainder(dividingBy: 1) + if 1 <= timedeltaFraction { + timedeltaFraction -= 1 + timedelta += 1 + } + updatedAt = value.seconds + return UInt32(timedelta) + } + + mutating func update(_ message: some RTMPMessage, chunkType: RTMPChunkType) { + switch chunkType { + case .zero: + if startedAt == 0 { + startedAt = TimeInterval(message.timestamp) / 1000 + updatedAt = 
TimeInterval(message.timestamp) / 1000 + } else { + updatedAt = TimeInterval(message.timestamp) / 1000 + } + default: + updatedAt += TimeInterval(message.timestamp) / 1000 + } + } + + mutating func clear() { + startedAt = kRTMPTimestamp_defaultTimeInterval + updatedAt = kRTMPTimestamp_defaultTimeInterval + timedeltaFraction = kRTMPTimestamp_defaultTimeInterval + } +} + +extension AVAudioTime: RTMPTimeConvertible { + var seconds: TimeInterval { + AVAudioTime.seconds(forHostTime: hostTime) + } +} + +extension RTMPTimestamp where T == AVAudioTime { + var value: AVAudioTime { + return AVAudioTime(hostTime: AVAudioTime.hostTime(forSeconds: updatedAt)) + } +} + +extension CMTime: RTMPTimeConvertible { +} + +extension RTMPTimestamp where T == CMTime { + var value: CMTime { + return CMTime(seconds: updatedAt, preferredTimescale: 1000) + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPURL.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPURL.swift new file mode 100644 index 000000000..43d228bfb --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/RTMP/RTMPURL.swift @@ -0,0 +1,29 @@ +import Foundation + +struct RTMPURL { + let url: URL + + var streamName: String { + var pathComponents = url.pathComponents + pathComponents.removeFirst() + pathComponents.removeFirst() + if let query = URLComponents(url: url, resolvingAgainstBaseURL: false)?.query { + return pathComponents.joined(separator: "/") + "?" 
+ query + } else { + return pathComponents.joined(separator: "/") + } + } + + var command: String { + let target = "/" + streamName + let urlString = url.absoluteString + guard let range = urlString.range(of: target) else { + return urlString + } + return urlString.replacingOccurrences(of: target, with: "", options: [], range: range) + } + + init(url: URL) { + self.url = url + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Util/ByteArray.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Util/ByteArray.swift new file mode 100644 index 000000000..a6aa59403 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Util/ByteArray.swift @@ -0,0 +1,398 @@ +import Foundation + +protocol ByteArrayConvertible { + var data: Data { get } + var length: Int { get set } + var position: Int { get set } + var bytesAvailable: Int { get } + + subscript(i: Int) -> UInt8 { get set } + + @discardableResult + func writeUInt8(_ value: UInt8) -> Self + func readUInt8() throws -> UInt8 + + @discardableResult + func writeInt8(_ value: Int8) -> Self + func readInt8() throws -> Int8 + + @discardableResult + func writeUInt16(_ value: UInt16) -> Self + func readUInt16() throws -> UInt16 + + @discardableResult + func writeInt16(_ value: Int16) -> Self + func readInt16() throws -> Int16 + + @discardableResult + func writeUInt24(_ value: UInt32) -> Self + func readUInt24() throws -> UInt32 + + @discardableResult + func writeUInt32(_ value: UInt32) -> Self + func readUInt32() throws -> UInt32 + + @discardableResult + func writeInt32(_ value: Int32) -> Self + func readInt32() throws -> Int32 + + @discardableResult + func writeUInt64(_ value: UInt64) -> Self + func readUInt64() throws -> UInt64 + + @discardableResult + func writeInt64(_ value: Int64) -> Self + func readInt64() throws -> Int64 + + @discardableResult + func writeDouble(_ value: Double) -> Self + func readDouble() throws -> Double + + @discardableResult + func writeFloat(_ value: Float) -> Self + func 
readFloat() throws -> Float + + @discardableResult + func writeUTF8(_ value: String) throws -> Self + func readUTF8() throws -> String + + @discardableResult + func writeUTF8Bytes(_ value: String) -> Self + func readUTF8Bytes(_ length: Int) throws -> String + + @discardableResult + func writeBytes(_ value: Data) -> Self + func readBytes(_ length: Int) throws -> Data + + @discardableResult + func clear() -> Self +} + +// MARK: - +/** + * The ByteArray class provides methods and properties the reading or writing with binary data. + */ +class ByteArray: ByteArrayConvertible { + static let fillZero: [UInt8] = [0x00] + + static let sizeOfInt8: Int = 1 + static let sizeOfInt16: Int = 2 + static let sizeOfInt24: Int = 3 + static let sizeOfInt32: Int = 4 + static let sizeOfFloat: Int = 4 + static let sizeOfInt64: Int = 8 + static let sizeOfDouble: Int = 8 + + /** + * The ByteArray error domain codes. + */ + enum Error: Swift.Error { + /// Error cause end of data. + case eof + /// Failed to parse + case parse + } + + /// Creates an empty ByteArray. + init() { + } + + /// Creates a ByteArray with data. + init(data: Data) { + self.data = data + } + + private(set) var data = Data() + + /// Specifies the length of buffer. + var length: Int { + get { + data.count + } + set { + switch true { + case (data.count < newValue): + data.append(Data(count: newValue - data.count)) + case (newValue < data.count): + data = data.subdata(in: 0.. UInt8 { + get { + data[i] + } + set { + data[i] = newValue + } + } + + /// Reading an UInt8 value. + func readUInt8() throws -> UInt8 { + guard ByteArray.sizeOfInt8 <= bytesAvailable else { + throw ByteArray.Error.eof + } + defer { + position += 1 + } + return data[position] + } + + /// Writing an UInt8 value. + @discardableResult + func writeUInt8(_ value: UInt8) -> Self { + writeBytes(value.data) + } + + /// Readning an Int8 value. 
+ func readInt8() throws -> Int8 { + guard ByteArray.sizeOfInt8 <= bytesAvailable else { + throw ByteArray.Error.eof + } + defer { + position += 1 + } + return Int8(bitPattern: UInt8(data[position])) + } + + /// Writing an Int8 value. + @discardableResult + func writeInt8(_ value: Int8) -> Self { + writeBytes(UInt8(bitPattern: value).data) + } + + /// Readning an UInt16 value. + func readUInt16() throws -> UInt16 { + guard ByteArray.sizeOfInt16 <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += ByteArray.sizeOfInt16 + return UInt16(data: data[position - ByteArray.sizeOfInt16.. Self { + writeBytes(value.bigEndian.data) + } + + /// Reading an Int16 value. + func readInt16() throws -> Int16 { + guard ByteArray.sizeOfInt16 <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += ByteArray.sizeOfInt16 + return Int16(data: data[position - ByteArray.sizeOfInt16.. Self { + writeBytes(value.bigEndian.data) + } + + /// Reading an UInt24 value. + func readUInt24() throws -> UInt32 { + guard ByteArray.sizeOfInt24 <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += ByteArray.sizeOfInt24 + return UInt32(data: ByteArray.fillZero + data[position - ByteArray.sizeOfInt24.. Self { + writeBytes(value.bigEndian.data.subdata(in: 1.. UInt32 { + guard ByteArray.sizeOfInt32 <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += ByteArray.sizeOfInt32 + return UInt32(data: data[position - ByteArray.sizeOfInt32.. Self { + writeBytes(value.bigEndian.data) + } + + /// Reading an Int32 value. + func readInt32() throws -> Int32 { + guard ByteArray.sizeOfInt32 <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += ByteArray.sizeOfInt32 + return Int32(data: data[position - ByteArray.sizeOfInt32.. Self { + writeBytes(value.bigEndian.data) + } + + /// Writing an UInt64 value. + @discardableResult + func writeUInt64(_ value: UInt64) -> Self { + writeBytes(value.bigEndian.data) + } + + /// Reading an UInt64 value. 
+ func readUInt64() throws -> UInt64 { + guard ByteArray.sizeOfInt64 <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += ByteArray.sizeOfInt64 + return UInt64(data: data[position - ByteArray.sizeOfInt64.. Self { + writeBytes(value.bigEndian.data) + } + + /// Reading an Int64 value. + func readInt64() throws -> Int64 { + guard ByteArray.sizeOfInt64 <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += ByteArray.sizeOfInt64 + return Int64(data: data[position - ByteArray.sizeOfInt64.. Double { + guard ByteArray.sizeOfDouble <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += ByteArray.sizeOfDouble + return Double(data: Data(data.subdata(in: position - ByteArray.sizeOfDouble.. Self { + writeBytes(Data(value.data.reversed())) + } + + /// Reading a Float value. + func readFloat() throws -> Float { + guard ByteArray.sizeOfFloat <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += ByteArray.sizeOfFloat + return Float(data: Data(data.subdata(in: position - ByteArray.sizeOfFloat.. Self { + writeBytes(Data(value.data.reversed())) + } + + /// Reading a string as UTF8 value. + func readUTF8() throws -> String { + try readUTF8Bytes(Int(try readUInt16())) + } + + /// Writing a string as UTF8 value. + @discardableResult + func writeUTF8(_ value: String) throws -> Self { + let utf8 = Data(value.utf8) + return writeUInt16(UInt16(utf8.count)).writeBytes(utf8) + } + + /// Clear the buffer. + @discardableResult + func clear() -> Self { + position = 0 + data.removeAll() + return self + } + + func readUTF8Bytes(_ length: Int) throws -> String { + guard length <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += length + + guard let result = String(data: data.subdata(in: position - length.. 
Self { + writeBytes(Data(value.utf8)) + } + + func readBytes(_ length: Int) throws -> Data { + guard length <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += length + return data.subdata(in: position - length.. Self { + if position == data.count { + data.append(value) + position = data.count + return self + } + let length: Int = min(data.count, value.count) + data[position.. Void)) { + let r: Int = (data.count - position) % length + for index in stride(from: data.startIndex.advanced(by: position), to: data.endIndex.advanced(by: -r), by: length) { + lambda(ByteArray(data: data.subdata(in: index.. [UInt32] { + let size: Int = MemoryLayout.size + if (data.endIndex - position) % size != 0 { + return [] + } + var result: [UInt32] = [] + for index in stride(from: data.startIndex.advanced(by: position), to: data.endIndex, by: size) { + result.append(UInt32(data: data[index.. class +/// - seealso: https://www.objc.io/blog/2018/12/18/atomic-variables/ +public struct HKAtomic { + private let queue = DispatchQueue(label: "com.haishinkit.HaishinKit.Atomic", attributes: .concurrent) + private var _value: A + + /// Getter for the value. + public var value: A { + queue.sync { self._value } + } + + /// Creates an instance of value. + public init(_ value: A) { + self._value = value + } + + /// Setter for the value. 
+ public mutating func mutate(_ transform: (inout A) -> Void) { + queue.sync(flags: .barrier) { + transform(&self._value) + } + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Util/MD5.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Util/MD5.swift new file mode 100755 index 000000000..ba729d36c --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Sources/Util/MD5.swift @@ -0,0 +1,200 @@ +import Foundation +import HaishinKit + +/** + Message Digest Algorithm 5 + - seealso: https://ja.wikipedia.org/wiki/MD5 + - seealso: https://www.ietf.org/rfc/rfc1321.txt + */ +enum MD5 { + static let a: UInt32 = 0x67452301 + static let b: UInt32 = 0xefcdab89 + static let c: UInt32 = 0x98badcfe + static let d: UInt32 = 0x10325476 + + static let S11: UInt32 = 7 + static let S12: UInt32 = 12 + static let S13: UInt32 = 17 + static let S14: UInt32 = 22 + static let S21: UInt32 = 5 + static let S22: UInt32 = 9 + static let S23: UInt32 = 14 + static let S24: UInt32 = 20 + static let S31: UInt32 = 4 + static let S32: UInt32 = 11 + static let S33: UInt32 = 16 + static let S34: UInt32 = 23 + static let S41: UInt32 = 6 + static let S42: UInt32 = 10 + static let S43: UInt32 = 15 + static let S44: UInt32 = 21 + + struct Context { + var a: UInt32 = MD5.a + var b: UInt32 = MD5.b + var c: UInt32 = MD5.c + var d: UInt32 = MD5.d + + mutating func FF(_ x: UInt32, _ s: UInt32, _ k: UInt32) { + let swap: UInt32 = d + let F: UInt32 = (b & c) | ((~b) & d) + d = c + c = b + b = b &+ rotateLeft(a &+ F &+ k &+ x, s) + a = swap + } + + mutating func GG(_ x: UInt32, _ s: UInt32, _ k: UInt32) { + let swap: UInt32 = d + let G: UInt32 = (d & b) | (c & (~d)) + d = c + c = b + b = b &+ rotateLeft(a &+ G &+ k &+ x, s) + a = swap + } + + mutating func HH(_ x: UInt32, _ s: UInt32, _ k: UInt32) { + let swap: UInt32 = d + let H: UInt32 = b ^ c ^ d + d = c + c = b + b = b &+ rotateLeft(a &+ H &+ k &+ x, s) + a = swap + } + + mutating func II(_ x: UInt32, _ s: UInt32, _ k: UInt32) { + let 
swap: UInt32 = d + let I: UInt32 = c ^ (b | (~d)) + d = c + c = b + b = b &+ rotateLeft(a &+ I &+ k &+ x, s) + a = swap + } + + func rotateLeft(_ x: UInt32, _ n: UInt32) -> UInt32 { + ((x << n) & 0xFFFFFFFF) | (x >> (32 - n)) + } + + var data: Data { + a.data + b.data + c.data + d.data + } + } + + static func base64(_ message: String) -> String { + calculate(message).base64EncodedString(options: .lineLength64Characters) + } + + static func calculate(_ message: String) -> Data { + calculate(ByteArray().writeUTF8Bytes(message).data) + } + + static func calculate(_ data: Data) -> Data { + var context = Context() + + let count: Data = UInt64(data.count * 8).bigEndian.data + let message = ByteArray(data: data + [0x80]) + message.length += 64 - (message.length % 64) + message[message.length - 8] = count[7] + message[message.length - 7] = count[6] + message[message.length - 6] = count[5] + message[message.length - 5] = count[4] + message[message.length - 4] = count[3] + message[message.length - 3] = count[2] + message[message.length - 2] = count[1] + message[message.length - 1] = count[0] + + // swiftlint:disable:this closure_body_length + message.sequence(64) { + let x: [UInt32] = $0.toUInt32() + + guard x.count == 16 else { + return + } + + var ctx = Context() + ctx.a = context.a + ctx.b = context.b + ctx.c = context.c + ctx.d = context.d + + /* Round 1 */ + ctx.FF(x[ 0], S11, 0xd76aa478) + ctx.FF(x[ 1], S12, 0xe8c7b756) + ctx.FF(x[ 2], S13, 0x242070db) + ctx.FF(x[ 3], S14, 0xc1bdceee) + ctx.FF(x[ 4], S11, 0xf57c0faf) + ctx.FF(x[ 5], S12, 0x4787c62a) + ctx.FF(x[ 6], S13, 0xa8304613) + ctx.FF(x[ 7], S14, 0xfd469501) + ctx.FF(x[ 8], S11, 0x698098d8) + ctx.FF(x[ 9], S12, 0x8b44f7af) + ctx.FF(x[10], S13, 0xffff5bb1) + ctx.FF(x[11], S14, 0x895cd7be) + ctx.FF(x[12], S11, 0x6b901122) + ctx.FF(x[13], S12, 0xfd987193) + ctx.FF(x[14], S13, 0xa679438e) + ctx.FF(x[15], S14, 0x49b40821) + + /* Round 2 */ + ctx.GG(x[ 1], S21, 0xf61e2562) + ctx.GG(x[ 6], S22, 0xc040b340) + 
ctx.GG(x[11], S23, 0x265e5a51) + ctx.GG(x[ 0], S24, 0xe9b6c7aa) + ctx.GG(x[ 5], S21, 0xd62f105d) + ctx.GG(x[10], S22, 0x2441453) + ctx.GG(x[15], S23, 0xd8a1e681) + ctx.GG(x[ 4], S24, 0xe7d3fbc8) + ctx.GG(x[ 9], S21, 0x21e1cde6) + ctx.GG(x[14], S22, 0xc33707d6) + ctx.GG(x[ 3], S23, 0xf4d50d87) + ctx.GG(x[ 8], S24, 0x455a14ed) + ctx.GG(x[13], S21, 0xa9e3e905) + ctx.GG(x[ 2], S22, 0xfcefa3f8) + ctx.GG(x[ 7], S23, 0x676f02d9) + ctx.GG(x[12], S24, 0x8d2a4c8a) + + /* Round 3 */ + ctx.HH(x[ 5], S31, 0xfffa3942) + ctx.HH(x[ 8], S32, 0x8771f681) + ctx.HH(x[11], S33, 0x6d9d6122) + ctx.HH(x[14], S34, 0xfde5380c) + ctx.HH(x[ 1], S31, 0xa4beea44) + ctx.HH(x[ 4], S32, 0x4bdecfa9) + ctx.HH(x[ 7], S33, 0xf6bb4b60) + ctx.HH(x[10], S34, 0xbebfbc70) + ctx.HH(x[13], S31, 0x289b7ec6) + ctx.HH(x[ 0], S32, 0xeaa127fa) + ctx.HH(x[ 3], S33, 0xd4ef3085) + ctx.HH(x[ 6], S34, 0x4881d05) + ctx.HH(x[ 9], S31, 0xd9d4d039) + ctx.HH(x[12], S32, 0xe6db99e5) + ctx.HH(x[15], S33, 0x1fa27cf8) + ctx.HH(x[ 2], S34, 0xc4ac5665) + + /* Round 4 */ + ctx.II(x[ 0], S41, 0xf4292244) + ctx.II(x[ 7], S42, 0x432aff97) + ctx.II(x[14], S43, 0xab9423a7) + ctx.II(x[ 5], S44, 0xfc93a039) + ctx.II(x[12], S41, 0x655b59c3) + ctx.II(x[ 3], S42, 0x8f0ccc92) + ctx.II(x[10], S43, 0xffeff47d) + ctx.II(x[ 1], S44, 0x85845dd1) + ctx.II(x[ 8], S41, 0x6fa87e4f) + ctx.II(x[15], S42, 0xfe2ce6e0) + ctx.II(x[ 6], S43, 0xa3014314) + ctx.II(x[13], S44, 0x4e0811a1) + ctx.II(x[ 4], S41, 0xf7537e82) + ctx.II(x[11], S42, 0xbd3af235) + ctx.II(x[ 2], S43, 0x2ad7d2bb) + ctx.II(x[ 9], S44, 0xeb86d391) + + context.a = context.a &+ ctx.a + context.b = context.b &+ ctx.b + context.c = context.c &+ ctx.c + context.d = context.d &+ ctx.d + } + + return context.data + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/AMF/AMF0SerializerTests.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/AMF/AMF0SerializerTests.swift new file mode 100644 index 000000000..1e989e79f --- /dev/null +++ 
b/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/AMF/AMF0SerializerTests.swift @@ -0,0 +1,71 @@ +import Foundation +import Testing + +@testable import RTMPHaishinKit + +@Suite struct AMF0SerializerTests { + static let connectionChunk: AMFObject = [ + "tcUrl": "rtmp://localhost:1935/live", + "flashVer": "FMLE/3.0 (compatible; FMSc/1.0)", + "swfUrl": nil, + "app": "live", + "fpad": false, + "audioCodecs": Double(1024), + "videoCodecs": Double(128), + "videoFunction": Double(1), + "capabilities": Double(239), + "fourCcList": ["av01", "vp09", "hvc1"], + "pageUrl": nil, + "objectEncoding": Double(0) + ] + + @Test func connectionChunk() throws { + var amf: any AMFSerializer = AMF0Serializer() + amf.serialize(AMF0SerializerTests.connectionChunk) + amf.position = 0 + let result: AMFObject = try amf.deserialize() + for key in AMF0SerializerTests.connectionChunk.keys { + let value: Any? = result[key]! as Any? + switch key { + case "tcUrl": + #expect(value as? String == "rtmp://localhost:1935/live") + case "flashVer": + #expect(value as? String == "FMLE/3.0 (compatible; FMSc/1.0)") + case "swfUrl": + #expect(value == nil) + case "app": + #expect(value as? String == "live") + case "fpad": + #expect(value as? Bool == false) + case "audioCodecs": + #expect(value as? Double == Double(1024)) + case "videoCodecs": + #expect(value as? Double == Double(128)) + case "videoFunction": + #expect(value as? Double == Double(1)) + case "capabilities": + #expect(value as? Double == Double(239)) + case "pageUrl": + #expect(value == nil) + case "fourCcList": + #expect(value as? [String] == ["av01", "vp09", "hvc1"]) + case "objectEncoding": + #expect(value as? Double == Double(0)) + default: + Issue.record(key.debugDescription as! 
(any Error)) + } + } + } + + @Test func asarray() throws { + var array = AMFArray() + array["hello"] = "world" + array["world"] = "hello" + var amf: any AMFSerializer = AMF0Serializer() + amf.serialize(array) + amf.position = 0 + let result: AMFArray = try amf.deserialize() + #expect(array["hello"] as? String == result["hello"] as? String) + #expect(array["world"] as? String == result["world"] as? String) + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/AMF/AMFFoundationTests.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/AMF/AMFFoundationTests.swift new file mode 100644 index 000000000..9f64d0687 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/AMF/AMFFoundationTests.swift @@ -0,0 +1,26 @@ +import Foundation +import Testing + +@testable import RTMPHaishinKit + +@Suite struct AMFFoundationTests { + static let hello: String = "hello" + + @Test func array() { + var array = AMFArray() + array[5] = "hoge" + if let array_5: String = array[5] as? String { + #expect(array_5 == "hoge") + } + } + + @Test func xmlDocument() { + let xml = AMFXMLDocument(data: AMFFoundationTests.hello) + #expect(xml.description == AMFFoundationTests.hello) + } + + @Test func xml() { + let xml = AMFXML(data: AMFFoundationTests.hello) + #expect(xml.description == AMFFoundationTests.hello) + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/Codec/AVCDecoderConfigurationRecordTests.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/Codec/AVCDecoderConfigurationRecordTests.swift new file mode 100644 index 000000000..03216e52a --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/Codec/AVCDecoderConfigurationRecordTests.swift @@ -0,0 +1,21 @@ +import AVFoundation +import Foundation +import Testing + +@testable import RTMPHaishinKit + +@Suite struct AVCDecoderConfigurationRecordTests { + @Test func main_1() { + let data = Data([1, 66, 0, 40, 255, 225, 0, 16, 39, 66, 0, 40, 171, 64, 60, 3, 143, 83, 77, 192, 128, 128, 128, 128, 1, 0, 4, 
40, 206, 60, 128]) + let avcc = AVCDecoderConfigurationRecord(data: data) + let formatDescription = avcc.makeFormatDescription() + #expect(formatDescription != nil) + } + + @Test func main_2() { + let data = Data([1, 66, 0, 40, 255, 225, 0, 11, 39, 66, 0, 40, 171, 64, 60, 3, 143, 83, 32, 1, 0, 4, 40, 206, 60, 128]) + let avcc = AVCDecoderConfigurationRecord(data: data) + let formatDescription = avcc.makeFormatDescription() + #expect(formatDescription != nil) + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/Codec/HEVCDecoderConfigurationRecordTests.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/Codec/HEVCDecoderConfigurationRecordTests.swift new file mode 100644 index 000000000..f23041d94 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/Codec/HEVCDecoderConfigurationRecordTests.swift @@ -0,0 +1,14 @@ +import AVFoundation +import Foundation +import Testing + +@testable import RTMPHaishinKit + +@Suite struct HEVCDecoderConfigurationRecordTests { + @Test func main() { + let data = Data([1, 1, 96, 0, 0, 0, 176, 0, 0, 0, 0, 0, 93, 240, 0, 252, 253, 248, 248, 0, 0, 15, 3, 32, 0, 1, 0, 24, 64, 1, 12, 1, 255, 255, 1, 96, 0, 0, 3, 0, 176, 0, 0, 3, 0, 0, 3, 0, 93, 21, 192, 144, 33, 0, 1, 0, 36, 66, 1, 1, 1, 96, 0, 0, 3, 0, 176, 0, 0, 3, 0, 0, 3, 0, 93, 160, 2, 40, 128, 39, 28, 178, 226, 5, 123, 145, 101, 83, 80, 16, 16, 16, 8, 34, 0, 1, 0, 7, 68, 1, 192, 44, 188, 20, 201]) + let hevc = HEVCDecoderConfigurationRecord(data: data) + let formatDescription = hevc.makeFormatDescription() + #expect(formatDescription != nil) + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/Extension/Foundation+ExtensionTests.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/Extension/Foundation+ExtensionTests.swift new file mode 100644 index 000000000..2b2633271 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/Extension/Foundation+ExtensionTests.swift @@ -0,0 +1,13 @@ +import Foundation +import Testing + +@testable import 
RTMPHaishinKit + +@Suite struct FoundationExtensionTest { + @Test func nSURL() { + let url = URL(string: "http://localhost/foo/bar?hello=world!!&foo=bar")! + let dictionary: [String: String] = url.dictionaryFromQuery() + #expect(dictionary["hello"] == "world!!") + #expect(dictionary["foo"] == "bar") + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/RTMP/RTMPChunkBufferTests.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/RTMP/RTMPChunkBufferTests.swift new file mode 100644 index 000000000..9bf10c677 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/RTMP/RTMPChunkBufferTests.swift @@ -0,0 +1,128 @@ +import Foundation +import Testing + +@testable import RTMPHaishinKit + +@Suite struct RTMPChunkBufferTests { + static let readData = Data([2, 0, 0, 0, 0, 0, 4, 5, 0, 0, 0, 0, 0, 76, 75, 64, 2, 0, 0, 0, 0, 0, 5, 6, 0, 0, 0, 0, 0, 76, 75, 64, 2, 2, 0, 0, 0, 0, 0, 4, 1, 0, 0, 0, 0, 0, 0, 32, 0, 3, 0, 0, 0, 0, 0, 190, 20, 0, 0, 0, 0, 2, 0, 7, 95, 114, 101, 115, 117, 108, 116, 0, 63, 240, 0, 0, 0, 0, 0, 0, 3, 0, 6, 102, 109, 115, 86, 101, 114, 2, 0, 13, 70, 77, 83, 47, 51, 44, 48, 44, 49, 44, 49, 50, 51, 0, 12, 99, 97, 112, 97, 98, 105, 108, 105, 116, 105, 101, 115, 0, 64, 63, 0, 0, 0, 0, 0, 0, 0, 0, 9, 3, 0, 5, 108, 101, 118, 101, 108, 2, 0, 6, 115, 116, 97, 116, 117, 115, 0, 4, 99, 111, 100, 101, 2, 0, 29, 78, 101, 116, 67, 111, 110, 110, 101, 99, 116, 105, 111, 110, 46, 67, 111, 110, 110, 101, 99, 116, 46, 83, 117, 99, 99, 101, 115, 115, 0, 11, 100, 101, 115, 99, 114, 105, 112, 116, 105, 111, 110, 2, 0, 21, 67, 111, 110, 110, 101, 99, 116, 105, 111, 110, 32, 115, 117, 99, 99, 101, 101, 100, 101, 100, 46, 0, 14, 111, 98, 106, 101, 99, 116, 69, 110, 99, 111, 100, 105, 110, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9]) + static let readDataBufferUnderflow = Data([2, 0, 0, 0, 0, 0, 4, 5, 0, 0, 0, 0, 0, 76, 75, 64, 2, 0, 0, 0, 0, 0, 5, 6, 0, 0, 0, 0, 0, 76, 75, 64, 2, 2, 0, 0, 0, 0, 0, 4, 1, 0, 0, 0, 0, 0, 0, 32, 0, 3, 0, 0, 0, 0, 0, 190, 20, 0, 
0, 0, 0, 2, 0, 7, 95, 114, 101, 115, 117, 108, 116, 0, 63, 240, 0, 0, 0, 0, 0, 0, 3, 0, 6, 102, 109, 115, 86, 101, 114, 2, 0, 13, 70, 77, 83, 47, 51, 44, 48, 44, 49, 44, 49, 50, 51, 0, 12, 99, 97, 112, 97, 98, 105, 108, 105, 116, 105, 101, 115, 0, 64, 63, 0, 0, 0, 0, 0, 0, 0, 0, 9, 3, 0, 5, 108, 101, 118, 101, 108, 2, 0, 6, 115, 116, 97, 116, 117, 115, 0, 4, 99, 111, 100, 101, 2, 0, 29, 78, 101, 116, 67, 111, 110, 110, 101, 99, 116, 105, 111, 110, 46, 67, 111, 110, 110, 101, 99, 116, 46, 83, 117, 99, 99, 101, 115, 115, 0, 11, 100, 101, 115, 99, 114, 105, 112, 116, 105, 111, 110, 2, 0, 21, 67, 111, 110, 110, 101, 99, 116, 105, 111, 110, 32, 115, 117, 99, 99, 101, 101, 100, 101, 100, 46, 0, 14, 111, 98, 106, 101, 99, 116, 69, 110, 99, 111, 100, 105, 110, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]) + + @Test func read() { + let buffer = RTMPChunkBuffer() + buffer.put(Self.readData) + + do { + let (chunkType, chunkStreamId) = try buffer.getBasicHeader() + #expect(chunkType == .zero) + #expect(chunkStreamId == 2) + let header = RTMPChunkMessageHeader() + try buffer.getMessageHeader(chunkType, messageHeader: header) + let message = header.makeMessage() as? RTMPWindowAcknowledgementSizeMessage + #expect(message?.size == 5000000) + } catch { + } + + do { + let (chunkType, chunkStreamId) = try buffer.getBasicHeader() + #expect(chunkType == .zero) + #expect(chunkStreamId == 2) + let header = RTMPChunkMessageHeader() + try buffer.getMessageHeader(chunkType, messageHeader: header) + let message = header.makeMessage() as? RTMPSetPeerBandwidthMessage + #expect(message?.size == 5000000) + } catch { + } + + do { + let (chunkType, chunkStreamId) = try buffer.getBasicHeader() + #expect(chunkType == .zero) + #expect(chunkStreamId == 2) + let header = RTMPChunkMessageHeader() + try buffer.getMessageHeader(chunkType, messageHeader: header) + let message = header.makeMessage() as? 
RTMPSetChunkSizeMessage + #expect(message?.size == 8192) + buffer.chunkSize = 8192 + } catch { + } + + do { + let (chunkType, chunkStreamId) = try buffer.getBasicHeader() + #expect(chunkType == .zero) + #expect(chunkStreamId == 3) + let header = RTMPChunkMessageHeader() + try buffer.getMessageHeader(chunkType, messageHeader: header) + let message = header.makeMessage() as? RTMPCommandMessage + #expect(message?.commandName == "_result") + } catch { + } + } + + @Test func readBufferUnderflow() { + let buffer = RTMPChunkBuffer() + buffer.chunkSize = 8192 + buffer.put(Self.readDataBufferUnderflow) + + var rollbackPosition = buffer.position + var count = 0 + do { + while buffer.hasRemaining { + rollbackPosition = buffer.position + let (chunkType, _) = try buffer.getBasicHeader() + let header = RTMPChunkMessageHeader() + try buffer.getMessageHeader(chunkType, messageHeader: header) + count += 1 + } + } catch RTMPChunkError.bufferUnderflow { + buffer.position = rollbackPosition + } catch { + } + #expect(rollbackPosition == 49) + #expect(count == 3) + buffer.put(Data([0, 9])) + do { + let (chunkType, _) = try buffer.getBasicHeader() + let header = RTMPChunkMessageHeader() + try buffer.getMessageHeader(chunkType, messageHeader: header) + let message = header.makeMessage() as? 
RTMPCommandMessage + #expect(message?.commandName == "_result") + } catch { + } + } + + @Test func chunkSize() { + let buffer = RTMPChunkBuffer() + buffer.chunkSize = 8192 + buffer.chunkSize = 128 + buffer.chunkSize = 8192 + } + + @Test func write() { + let buffer = RTMPChunkBuffer() + let connection = RTMPCommandMessage( + streamId: 0, + transactionId: 0, + objectEncoding: .amf0, + commandName: "hello", + commandObject: nil, + arguments: [] + ) + let iterator = buffer.putMessage(.zero, chunkStreamId: 1, message: connection) + #expect(iterator.next() == Data([1, 0, 0, 0, 0, 0, 18, 20, 0, 0, 0, 0, 2, 0, 5, 104, 101, 108, 108, 111, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5])) + } + + @Test func writeChunkSize() { + let buffer = RTMPChunkBuffer() + let connection = RTMPCommandMessage( + streamId: 0, + transactionId: 0, + objectEncoding: .amf0, + commandName: [String](repeating: "a", count: 128 + 56).joined(), + commandObject: nil, + arguments: [] + ) + let iterator = buffer.putMessage(.zero, chunkStreamId: 1, message: connection) + #expect(iterator.next() == Data([1, 0, 0, 0, 0, 0, 197, 20, 0, 0, 0, 0, 2, 0, 184, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97])) + #expect(iterator.next() == Data([193, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5])) + #expect(iterator.next() == nil) + } +} diff --git 
a/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/RTMP/RTMPConnectionTests.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/RTMP/RTMPConnectionTests.swift new file mode 100644 index 000000000..c2210036d --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/RTMP/RTMPConnectionTests.swift @@ -0,0 +1,17 @@ +import Foundation +import Testing + +@testable import RTMPHaishinKit + +@Suite struct RTMPConnectionTests { + @Test func releaseWhenClose() async throws { + weak var weakConnection: RTMPConnection? + _ = try? await { + let connection = RTMPConnection() + _ = try await connection.connect("rtmp://localhost:19350/live") + try await connection.close() + weakConnection = connection + }() + #expect(weakConnection == nil) + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/RTMP/RTMPStatusTests.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/RTMP/RTMPStatusTests.swift new file mode 100644 index 000000000..ea3062a1e --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/RTMP/RTMPStatusTests.swift @@ -0,0 +1,21 @@ +import Foundation +import Testing + +@testable import RTMPHaishinKit + +@Suite struct RTMPStatusTests { + @Test func dynamicMemeberLookup() { + let data: AMFObject = [ + "level": "status", + "code": "NetConnection.Connect.Success", + "description": "Connection succeeded.", + "objectEncoding": 0.0, + "hello": "world!!" + ] + guard let status = RTMPStatus(data) else { + return + } + #expect("world!!" 
== status.hello) + #expect(0.0 == status.objectEncoding) + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/RTMP/RTMPTimestampTests.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/RTMP/RTMPTimestampTests.swift new file mode 100644 index 000000000..fe05427ea --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/RTMP/RTMPTimestampTests.swift @@ -0,0 +1,43 @@ +import AVFoundation +import Foundation +@testable import RTMPHaishinKit +import Testing + +@Suite struct RTMPTimestampTests { + @Test func updateCMTime() throws { + let times: [CMTime] = [ + CMTime(value: 286340171565869, timescale: 1000000000), + CMTime(value: 286340204889958, timescale: 1000000000), + CMTime(value: 286340238223357, timescale: 1000000000), + CMTime(value: 286340271560111, timescale: 1000000000), + CMTime(value: 286340304906325, timescale: 1000000000), + CMTime(value: 286340338232723, timescale: 1000000000), + CMTime(value: 286340338232723, timescale: 1000000000) + ] + var timestamp = RTMPTimestamp() + #expect(try timestamp.update(times[0]) == 0) + #expect(try timestamp.update(times[1]) == 33) + #expect(try timestamp.update(times[2]) == 33) + #expect(try timestamp.update(times[3]) == 33) + #expect(try timestamp.update(times[4]) == 34) + #expect(try timestamp.update(times[5]) == 33) + } + + @Test func updateAVAudioTime() throws { + let times: [AVAudioTime] = [ + .init(hostTime: 6901294874500, sampleTime: 13802589749, atRate: 48000), + .init(hostTime: 6901295386500, sampleTime: 13802590773, atRate: 48000), + .init(hostTime: 6901295898500, sampleTime: 13802591797, atRate: 48000), + .init(hostTime: 6901296410500, sampleTime: 13802592821, atRate: 48000), + .init(hostTime: 6901296922500, sampleTime: 13802593845, atRate: 48000), + .init(hostTime: 6901297434500, sampleTime: 13802594869, atRate: 48000) + ] + var timestamp = RTMPTimestamp() + #expect(try timestamp.update(times[0]) == 0) + #expect(try timestamp.update(times[1]) == 21) + #expect(try timestamp.update(times[2]) 
== 21) + #expect(try timestamp.update(times[3]) == 22) + #expect(try timestamp.update(times[4]) == 21) + #expect(try timestamp.update(times[5]) == 21) + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/RTMP/RTMPURLTests.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/RTMP/RTMPURLTests.swift new file mode 100644 index 000000000..81c66284f --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/RTMP/RTMPURLTests.swift @@ -0,0 +1,19 @@ +import AVFoundation +import Foundation +import Testing + +@testable import RTMPHaishinKit + +@Suite struct RTMPURLTests { + @Test func main() { + let url = RTMPURL(url: URL(string: "rtmp://localhost/live/live")!) + #expect(url.streamName == "live") + #expect(url.command == "rtmp://localhost/live") + } + + @Test func query() { + let url = RTMPURL(url: URL(string: "rtmp://localhost/live/live?parameter")!) + #expect(url.streamName == "live?parameter") + #expect(url.command == "rtmp://localhost/live") + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/RTMP/RTMPVideoFourCCTests.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/RTMP/RTMPVideoFourCCTests.swift new file mode 100644 index 000000000..a404b17b5 --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/RTMP/RTMPVideoFourCCTests.swift @@ -0,0 +1,21 @@ +import AVFoundation +import Foundation +import Testing + +@testable import RTMPHaishinKit + +@Suite struct RTMPVideoFourCCTests { + @Test func main() { + #expect("av01" == str4(n: Int(RTMPVideoFourCC.av1.rawValue))) + #expect("hvc1" == str4(n: Int(RTMPVideoFourCC.hevc.rawValue))) + #expect("vp09" == str4(n: Int(RTMPVideoFourCC.vp9.rawValue))) + } + + func str4(n: Int) -> String { + var result = String(UnicodeScalar((n >> 24) & 255)?.description ?? "") + result.append(UnicodeScalar((n >> 16) & 255)?.description ?? "") + result.append(UnicodeScalar((n >> 8) & 255)?.description ?? "") + result.append(UnicodeScalar(n & 255)?.description ?? 
"") + return result + } +} diff --git a/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/Util/MD5Tests.swift b/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/Util/MD5Tests.swift new file mode 100755 index 000000000..a1da30f2a --- /dev/null +++ b/Vendor/HaishinKit.swift/RTMPHaishinKit/Tests/Util/MD5Tests.swift @@ -0,0 +1,23 @@ +import Foundation +import Testing + +@testable import RTMPHaishinKit + +@Suite struct MD5Tests { + func hex(_ data: Data) -> String { + var hash = "" + for i in 0.. CMFormatDescription? { + guard + let pps = first(where: { $0.type == .pps }), + let sps = first(where: { $0.type == .sps }) else { + return nil + } + var formatDescription: CMFormatDescription? + let status = pps.data.withUnsafeBytes { (ppsBuffer: UnsafeRawBufferPointer) -> OSStatus in + guard let ppsBaseAddress = ppsBuffer.baseAddress else { + return kCMFormatDescriptionBridgeError_InvalidParameter + } + return sps.data.withUnsafeBytes { (spsBuffer: UnsafeRawBufferPointer) -> OSStatus in + guard let spsBaseAddress = spsBuffer.baseAddress else { + return kCMFormatDescriptionBridgeError_InvalidParameter + } + let pointers: [UnsafePointer] = [ + spsBaseAddress.assumingMemoryBound(to: UInt8.self), + ppsBaseAddress.assumingMemoryBound(to: UInt8.self) + ] + let sizes: [Int] = [spsBuffer.count, ppsBuffer.count] + return CMVideoFormatDescriptionCreateFromH264ParameterSets( + allocator: kCFAllocatorDefault, + parameterSetCount: pointers.count, + parameterSetPointers: pointers, + parameterSetSizes: sizes, + nalUnitHeaderLength: nalUnitHeaderLength, + formatDescriptionOut: &formatDescription + ) + } + } + if status != noErr { + logger.error(status) + } + return formatDescription + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/Extension/CMFormatDescription+Extension.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/Extension/CMFormatDescription+Extension.swift new file mode 100644 index 000000000..1cd2db84a --- /dev/null +++ 
b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/Extension/CMFormatDescription+Extension.swift @@ -0,0 +1,19 @@ +import CoreMedia +import Foundation + +extension CMFormatDescription { + var streamType: ESStreamType { + switch mediaSubType { + case .hevc: + return .h265 + case .h264: + return .h264 + case .mpeg4AAC_LD: + return .adtsAac + case .mpeg4AAC: + return .adtsAac + default: + return .unspecific + } + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/Extension/Data+Extension.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/Extension/Data+Extension.swift new file mode 100644 index 000000000..9b7609064 --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/Extension/Data+Extension.swift @@ -0,0 +1,18 @@ +import Foundation + +extension Data { + func chunk(_ size: Int) -> [Data] { + if count < size { + return [self] + } + var chunks: [Data] = [] + let length = count + var offset = 0 + repeat { + let thisChunkSize = ((length - offset) > size) ? size : (length - offset) + chunks.append(subdata(in: offset.. CMFormatDescription? { + guard + let vps = first(where: { $0.type == .vps }), + let sps = first(where: { $0.type == .sps }), + let pps = first(where: { $0.type == .pps }) else { + return nil + } + return vps.data.withUnsafeBytes { (vpsBuffer: UnsafeRawBufferPointer) -> CMFormatDescription? in + guard let vpsBaseAddress = vpsBuffer.baseAddress else { + return nil + } + return sps.data.withUnsafeBytes { (spsBuffer: UnsafeRawBufferPointer) -> CMFormatDescription? in + guard let spsBaseAddress = spsBuffer.baseAddress else { + return nil + } + return pps.data.withUnsafeBytes { (ppsBuffer: UnsafeRawBufferPointer) -> CMFormatDescription? in + guard let ppsBaseAddress = ppsBuffer.baseAddress else { + return nil + } + var formatDescriptionOut: CMFormatDescription? 
+ let pointers: [UnsafePointer] = [ + vpsBaseAddress.assumingMemoryBound(to: UInt8.self), + spsBaseAddress.assumingMemoryBound(to: UInt8.self), + ppsBaseAddress.assumingMemoryBound(to: UInt8.self) + ] + let sizes: [Int] = [vpsBuffer.count, spsBuffer.count, ppsBuffer.count] + CMVideoFormatDescriptionCreateFromHEVCParameterSets( + allocator: kCFAllocatorDefault, + parameterSetCount: pointers.count, + parameterSetPointers: pointers, + parameterSetSizes: sizes, + nalUnitHeaderLength: nalUnitHeaderLength, + extensions: nil, + formatDescriptionOut: &formatDescriptionOut + ) + return formatDescriptionOut + } + } + } + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/Extension/NALUnitReader+Extension.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/Extension/NALUnitReader+Extension.swift new file mode 100644 index 000000000..5a2f4775c --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/Extension/NALUnitReader+Extension.swift @@ -0,0 +1,18 @@ +import CoreMedia +import Foundation +import HaishinKit + +extension NALUnitReader { + func makeFormatDescription(_ data: inout Data, type: ESStreamType) -> CMFormatDescription? 
{ + switch type { + case .h264: + let units = read(&data, type: H264NALUnit.self) + return units.makeFormatDescription(nalUnitHeaderLength) + case .h265: + let units = read(&data, type: HEVCNALUnit.self) + return units.makeFormatDescription(nalUnitHeaderLength) + default: + return nil + } + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/Extension/sockaddr_in+Extension.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/Extension/sockaddr_in+Extension.swift new file mode 100644 index 000000000..07f56a833 --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/Extension/sockaddr_in+Extension.swift @@ -0,0 +1,30 @@ +import Foundation + +extension sockaddr_in { + var size: Int { + return MemoryLayout.size(ofValue: self) + } + + init?(_ host: String, port: Int) { + self.init() + self.sin_family = sa_family_t(AF_INET) + self.sin_port = CFSwapInt16BigToHost(UInt16(port)) + if inet_pton(AF_INET, host, &sin_addr) == 1 { + return + } + guard let hostent = gethostbyname(host), hostent.pointee.h_addrtype == AF_INET else { + return nil + } + if let h_addr_list = hostent.pointee.h_addr_list[0] { + self.sin_addr = UnsafeRawPointer(h_addr_list).assumingMemoryBound(to: in_addr.self).pointee + } else { + return nil + } + } + + mutating func makeSockaddr() -> sockaddr { + var address = sockaddr() + memcpy(&address, &self, size) + return address + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTConnection.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTConnection.swift new file mode 100644 index 000000000..bcd8fd896 --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTConnection.swift @@ -0,0 +1,183 @@ +import Combine +import Foundation +import HaishinKit +import libsrt + +/// An actor that provides the interface to control a SRT connection. +/// +/// Supports a one-to-one connection. Multiple connections cannot be established. 
+public actor SRTConnection: NetworkConnection { + /// The error domain codes. + public enum Error: Swift.Error { + /// An invalid internal stare. + case invalidState + /// The uri isn’t supported. + case unsupportedUri(_ uri: URL?) + /// The failed to connect. + case failedToConnect(_ reason: SRTRejectReason) + } + + /// The SRT Library version. + public static let version: String = SRT_VERSION_STRING + /// The URI passed to the `connect()` method. + public private(set) var uri: URL? + /// This instance connect to server(true) or not(false) + @Published public private(set) var connected = false + /// The performance data. + public var performanceData: SRTPerformanceData? { + get async { + return await socket?.performanceData + } + } + + private var socket: SRTSocket? + private var streams: [SRTStream] = [] + private var listener: SRTSocket? + private var networkMonitor: NetworkMonitor? + + /// Creates an object. + public init() { + srt_startup() + socket = SRTSocket() + } + + deinit { + streams.removeAll() + srt_cleanup() + } + + /// Gets a SRTSocketOption. + public func getSocketOption(_ name: SRTSocketOption.Name) async throws -> SRTSocketOption? { + try await socket?.getSocketOption(name) + } + + /// Sets a SRTSocketOption. + public func setSocketOption(_ option: SRTSocketOption) async throws { + if connected { + guard option.name.restriction == .post else { + throw Error.invalidState + } + try await socket?.setSocketOption(option) + } else { + guard option.name.restriction == .pre else { + throw Error.invalidState + } + try await socket?.setSocketOption(option) + } + } + + /// Creates a connection to the server or waits for an incoming connection. + /// + /// - Parameters: + /// - uri: You can specify connection options in the URL. This follows the standard SRT format. + /// + /// - srt://192.168.1.1:9000?mode=caller + /// - Connect to the specified server. + /// - srt://:9000?mode=listener + /// - Wait for connections as a server. 
+ public func connect(_ uri: URL?) async throws { + guard let url = SRTSocketURL(uri) else { + throw Error.unsupportedUri(uri) + } + do { + try await withCheckedThrowingContinuation { (continuation: CheckedContinuation) in + Task { + do { + try await socket?.open(url) + self.uri = uri + switch url.mode { + case .caller: + break + case .listener: + listener = socket + socket = try await listener?.accept(url.options) + await listener?.stopRunning() + listener = nil + case .rendezvous: + break + } + connected = await socket?.status == .connected + continuation.resume() + } catch { + socket = SRTSocket() + continuation.resume(throwing: error) + } + } + } + Task { + guard let socket else { + return + } + let networkMonitor = await socket.makeNetworkMonitor() + self.networkMonitor = networkMonitor + await networkMonitor.startRunning() + for await event in await networkMonitor.event { + for stream in streams { + await stream.dispatch(event) + } + } + } + } catch let error as SRTSocket.Error { + switch error { + case .rejected(let reason): + throw Error.failedToConnect(reason) + default: + throw Error.invalidState + } + } catch { + throw Error.invalidState + } + } + + /// Closes a connection. 
+ public func close() async { + guard uri != nil else { + return + } + await networkMonitor?.stopRunning() + networkMonitor = nil + for stream in streams { + await stream.close() + } + await socket?.stopRunning() + socket = nil + await listener?.stopRunning() + listener = nil + uri = nil + connected = false + socket = SRTSocket() + } + + func send(_ data: Data) async { + do { + try await socket?.send(data) + } catch { + await close() + } + } + + func recv() { + Task { + guard let socket else { + return + } + for await data in await socket.inputs { + await streams.first?.doInput(data) + } + await close() + } + } + + func addStream(_ stream: SRTStream) { + guard !streams.contains(where: { $0 === stream }) else { + return + } + streams.append(stream) + } + + func removeStream(_ stream: SRTStream) { + if let index = streams.firstIndex(where: { $0 === stream }) { + streams.remove(at: index) + } + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTLogger.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTLogger.swift new file mode 100644 index 000000000..e72a2be27 --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTLogger.swift @@ -0,0 +1,140 @@ +import Foundation +import libsrt + +/// An actor for writing interpolated string messages to srt logging system. +public actor SRTLogger { + /// The singleton logger instance. + public static let shared = SRTLogger() + + public enum Level: Sendable { + /// Highly detailed and very frequent messages. + case debug + /// Occasionally displayed information. + case notice + /// Unusual behavior. 
+ case warning + /// Abnormal behavior + case error + /// Error that makes the current socket unusabl + case crit + + var value: Int32 { + switch self { + case .debug: + return LOG_DEBUG + case .notice: + return LOG_NOTICE + case .warning: + return LOG_WARNING + case .error: + return LOG_ERR + case .crit: + return LOG_CRIT + } + } + } + + /// Constants that indicate the addition to levels the logging system has functional areas. + public enum FunctionalArea: Int32, Sendable { + /// General uncategorized log, for serious issues only + case general = 0 + /// Socket create/open/close/configure activities + case bstats = 1 + /// Connection establishment and handshake + case control = 2 + /// The checkTimer and around activities + case data = 3 + /// The TsBPD thread + case tsbpd = 4 + /// System resource allocation and management + case rsrc = 5 + /// Haicrypt module area + case haicrypt = 6 + /// Congestion control module + case congest = 7 + /// Packet filter module + case pfilter = 8 + /// Applications + case applog + /// API part for socket and library managmenet + case apiCtrl = 11 + /// Queue control activities + case queCtrl = 13 + /// EPoll, internal update activities + case epollUpd = 16 + /// API part for receiving + case apiRecv = 21 + /// Buffer, receiving side + case bufRecv = 22 + /// Queue, receiving side + case queRecv = 23 + /// CChannel, receiving side + case chanRecv = 24 + /// Group, receiving side + case grpRecv = 25 + /// API part for sending + case apiSend = 31 + /// Buffer, sending side + case bufSend = 32 + /// Queue, sending side + case queSend = 33 + /// CChannel, sending side + case chnSend = 34 + /// Group, sending side + case grpSend = 35 + /// Internal activities not connected directly to a socket + case `internal` = 41 + /// Queue, management part + case queMgmt = 43 + /// CChannel, management part + case chnMgmt = 44 + /// Group, management part + case grpMgmt = 45 + /// EPoll, API part + case epollApi = 46 + + func addLogFA() { + 
srt_addlogfa(rawValue) + } + + func delLogFA() { + srt_dellogfa(rawValue) + } + } + + private init() { + srt_setloglevel(level.value) + } + + /// The current logging level. + public private(set) var level: Level = .notice { + didSet { + guard level != oldValue else { + return + } + srt_setloglevel(level.value) + } + } + + /// The current logging functional areas. + public private(set) var functionalAreas: Set = [] { + didSet { + for area in oldValue.subtracting(functionalAreas) { + area.delLogFA() + } + for area in functionalAreas.subtracting(oldValue) { + area.addLogFA() + } + } + } + + /// Sets the current logging level. + public func setLevel(_ level: Level) { + self.level = level + } + + /// Sets the current logging functional areas. + public func setFunctionalAreas(_ functionalAreas: Set) { + self.functionalAreas = functionalAreas + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTMode.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTMode.swift new file mode 100644 index 000000000..8073f0d28 --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTMode.swift @@ -0,0 +1,12 @@ +import Foundation +import libsrt + +/// The type of SRTHaishinKit supports srt modes. +enum SRTMode: String, Sendable { + /// The caller mode. + case caller + /// The listener mode. + case listener + /// The rendezvous mode. + case rendezvous +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTPerformanceData.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTPerformanceData.swift new file mode 100644 index 000000000..25dd27a58 --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTPerformanceData.swift @@ -0,0 +1,232 @@ +import Foundation +import libsrt + +/// The SRTPerformanceData represents the SRT's performance statistics. This struct is wrapper for an CBytePerfMon. 
+/// - seealso: https://github.com/Haivision/srt/blob/master/srtcore/srt.h +public struct SRTPerformanceData: Sendable { + /// The time since the UDT entity is started, in milliseconds. + public let msTimeStamp: Int64 + /// The total number of sent data packets, including retransmissions. + public let pktSentTotal: Int64 + /// The total number of received packets. + public let pktRecvTotal: Int64 + /// The total number of lost packets (sender side) + public let pktSndLossTotal: Int32 + /// The total number of lost packets (receiver side) + public let pktRcvLossTotal: Int32 + /// The total number of retransmitted packets + public let pktRetransTotal: Int32 + /// The total number of sent ACK packets + public let pktSentACKTotal: Int32 + /// The total number of received ACK packets + public let pktRecvACKTotal: Int32 + /// The total number of sent NAK packets + public let pktSentNAKTotal: Int32 + /// The total number of received NAK packets + public let pktRecvNAKTotal: Int32 + /// The total time duration when UDT is sending data (idle time exclusive) + public let usSndDurationTotal: Int64 + /// The number of too-late-to-send dropped packets + public let pktSndDropTotal: Int32 + /// The number of too-late-to play missing packets + public let pktRcvDropTotal: Int32 + /// The number of undecrypted packets + public let pktRcvUndecryptTotal: Int32 + /// The total number of sent data bytes, including retransmissions + public let byteSentTotal: UInt64 + /// The total number of received bytes + public let byteRecvTotal: UInt64 + /// The total number of lost bytes + public let byteRcvLossTotal: UInt64 + /// The total number of retransmitted bytes + public let byteRetransTotal: UInt64 + /// The number of too-late-to-send dropped bytes + public let byteSndDropTotal: UInt64 + /// The number of too-late-to play missing bytes (estimate based on average packet size) + public let byteRcvDropTotal: UInt64 + /// The number of undecrypted bytes + public let byteRcvUndecryptTotal: 
UInt64 + /// The number of sent data packets, including retransmissions + public let pktSent: Int64 + /// The number of received packets + public let pktRecv: Int64 + /// The number of lost packets (sender side) + public let pktSndLoss: Int32 + /// The number of lost packets (receiver side) + public let pktRcvLoss: Int32 + /// The number of retransmitted packets + public let pktRetrans: Int32 + /// The number of retransmitted packets received + public let pktRcvRetrans: Int32 + /// The number of sent ACK packets + public let pktSentACK: Int32 + /// The number of received ACK packets + public let pktRecvACK: Int32 + /// The number of sent NAK packets + public let pktSentNAK: Int32 + /// The number of received NAK packets + public let pktRecvNAK: Int32 + /// The sending rate in Mb/s + public let mbpsSendRate: Double + /// The receiving rate in Mb/s + public let mbpsRecvRate: Double + /// The busy sending time (i.e., idle time exclusive) + public let usSndDuration: Int64 + /// The size of order discrepancy in received sequences + public let pktReorderDistance: Int32 + /// The average time of packet delay for belated packets (packets with sequence past the ACK) + public let pktRcvAvgBelatedTime: Double + /// The number of received AND IGNORED packets due to having come too late + public let pktRcvBelated: Int64 + /// The number of too-late-to-send dropped packets + public let pktSndDrop: Int32 + /// The number of too-late-to play missing packets + public let pktRcvDrop: Int32 + /// The number of undecrypted packets + public let pktRcvUndecrypt: Int32 + /// The number of sent data bytes, including retransmissions + public let byteSent: UInt64 + /// The number of received bytes + public let byteRecv: UInt64 + /// The number of retransmitted bytes + public let byteRcvLoss: UInt64 + /// The number of retransmitted bytes + public let byteRetrans: UInt64 + /// The number of too-late-to-send dropped bytes + public let byteSndDrop: UInt64 + /// The number of too-late-to play 
missing bytes (estimate based on average packet size) + public let byteRcvDrop: UInt64 + /// The number of undecrypted bytes + public let byteRcvUndecrypt: UInt64 + /// The packet sending period, in microseconds + public let usPktSndPeriod: Double + /// The flow window size, in number of packets + public let pktFlowWindow: Int32 + /// The congestion window size, in number of packets + public let pktCongestionWindow: Int32 + /// The number of packets on flight + public let pktFlightSize: Int32 + /// The RTT, in milliseconds + public let msRTT: Double + /// The estimated bandwidth, in Mb/s + public let mbpsBandwidth: Double + /// The available UDT sender buffer size + public let byteAvailSndBuf: Int32 + /// The available UDT receiver buffer size + public let byteAvailRcvBuf: Int32 + /// The transmit Bandwidth ceiling (Mbps) + public let mbpsMaxBW: Double + /// The MTU + public let byteMSS: Int32 + /// The UnACKed packets in UDT sender + public let pktSndBuf: Int32 + /// The UnACKed bytes in UDT sender + public let byteSndBuf: Int32 + /// The UnACKed timespan (msec) of UDT sender + public let msSndBuf: Int32 + /// Timestamp-based Packet Delivery Delay + public let msSndTsbPdDelay: Int32 + /// Undelivered packets in UDT receiver + public let pktRcvBuf: Int32 + /// The undelivered bytes of UDT receiver + public let byteRcvBuf: Int32 + /// The undelivered timespan (msec) of UDT receiver + public let msRcvBuf: Int32 + /// The Timestamp-based Packet Delivery Delay + public let msRcvTsbPdDelay: Int32 + /// The number of control packets supplied by packet filter + public let pktSndFilterExtraTotal: Int32 + /// The number of control packets received and not supplied back + public let pktRcvFilterExtraTotal: Int32 + /// The number of packets that the filter supplied extra (e.g. 
FEC rebuilt) + public let pktRcvFilterSupplyTotal: Int32 + /// The number of packet loss not coverable by filter + public let pktRcvFilterLossTotal: Int32 + /// The number of control packets supplied by packet filter + public let pktSndFilterExtra: Int32 + /// The number of control packets received and not supplied back + public let pktRcvFilterExtra: Int32 + /// The number of packets that the filter supplied extra (e.g. FEC rebuilt) + public let pktRcvFilterSupply: Int32 + /// The number of packet loss not coverable by filter + public let pktRcvFilterLoss: Int32 + /// The packet reorder tolerance value + public let pktReorderTolerance: Int32 + + init(mon: CBytePerfMon) { + self.msTimeStamp = mon.msTimeStamp + self.pktSentTotal = mon.pktSentTotal + self.pktRecvTotal = mon.pktRecvTotal + self.pktSndLossTotal = mon.pktSndLossTotal + self.pktRcvLossTotal = mon.pktRcvLossTotal + self.pktRetransTotal = mon.pktRetransTotal + self.pktSentACKTotal = mon.pktSentACKTotal + self.pktRecvACKTotal = mon.pktRecvACKTotal + self.pktSentNAKTotal = mon.pktSentNAKTotal + self.pktRecvNAKTotal = mon.pktRecvNAKTotal + self.usSndDurationTotal = mon.usSndDurationTotal + self.pktSndDropTotal = mon.pktSndDropTotal + self.pktRcvDropTotal = mon.pktRcvDropTotal + self.pktRcvUndecryptTotal = mon.pktRcvUndecryptTotal + self.byteSentTotal = mon.byteSentTotal + self.byteRecvTotal = mon.byteRecvTotal + self.byteRcvLossTotal = mon.byteRcvLossTotal + self.byteRetransTotal = mon.byteRetransTotal + self.byteSndDropTotal = mon.byteSndDropTotal + self.byteRcvDropTotal = mon.byteRcvDropTotal + self.byteRcvUndecryptTotal = mon.byteRcvUndecryptTotal + self.pktSent = mon.pktSent + self.pktRecv = mon.pktRecv + self.pktSndLoss = mon.pktSndLoss + self.pktRcvLoss = mon.pktRcvLoss + self.pktRetrans = mon.pktRetrans + self.pktRcvRetrans = mon.pktRcvRetrans + self.pktSentACK = mon.pktSentACK + self.pktRecvACK = mon.pktRecvACK + self.pktSentNAK = mon.pktSentNAK + self.pktRecvNAK = mon.pktRecvNAK + self.mbpsSendRate = 
mon.mbpsSendRate + self.mbpsRecvRate = mon.mbpsRecvRate + self.usSndDuration = mon.usSndDuration + self.pktReorderDistance = mon.pktReorderDistance + self.pktRcvAvgBelatedTime = mon.pktRcvAvgBelatedTime + self.pktRcvBelated = mon.pktRcvBelated + self.pktSndDrop = mon.pktSndDrop + self.pktRcvDrop = mon.pktRcvDrop + self.pktRcvUndecrypt = mon.pktRcvUndecrypt + self.byteSent = mon.byteSent + self.byteRecv = mon.byteRecv + self.byteRcvLoss = mon.byteRcvLoss + self.byteRetrans = mon.byteRetrans + self.byteSndDrop = mon.byteSndDrop + self.byteRcvDrop = mon.byteRcvDrop + self.byteRcvUndecrypt = mon.byteRcvUndecrypt + self.usPktSndPeriod = mon.usPktSndPeriod + self.pktFlowWindow = mon.pktFlowWindow + self.pktCongestionWindow = mon.pktCongestionWindow + self.pktFlightSize = mon.pktFlightSize + self.msRTT = mon.msRTT + self.mbpsBandwidth = mon.mbpsBandwidth + self.byteAvailSndBuf = mon.byteAvailSndBuf + self.byteAvailRcvBuf = mon.byteAvailRcvBuf + self.mbpsMaxBW = mon.mbpsMaxBW + self.byteMSS = mon.byteMSS + self.pktSndBuf = mon.pktSndBuf + self.byteSndBuf = mon.byteSndBuf + self.msSndBuf = mon.msSndBuf + self.msSndTsbPdDelay = mon.msSndTsbPdDelay + self.pktRcvBuf = mon.pktRcvBuf + self.byteRcvBuf = mon.byteRcvBuf + self.msRcvBuf = mon.msRcvBuf + self.msRcvTsbPdDelay = mon.msRcvTsbPdDelay + self.pktSndFilterExtraTotal = mon.pktSndFilterExtraTotal + self.pktRcvFilterExtraTotal = mon.pktRcvFilterExtraTotal + self.pktRcvFilterSupplyTotal = mon.pktRcvFilterSupplyTotal + self.pktRcvFilterLossTotal = mon.pktRcvFilterLossTotal + self.pktSndFilterExtra = mon.pktSndFilterExtra + self.pktRcvFilterExtra = mon.pktRcvFilterExtra + self.pktRcvFilterSupply = mon.pktRcvFilterSupply + self.pktRcvFilterLoss = mon.pktRcvFilterLoss + self.pktReorderTolerance = mon.pktReorderTolerance + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTRejectReason.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTRejectReason.swift new file mode 100644 index 000000000..f66c5eabb 
--- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTRejectReason.swift @@ -0,0 +1,30 @@ +import libsrt + +/// An enumeration that describes the srt connection reject reason. +/// +/// - note: The meaning of each reason follows the SRT protocol specification. +/// - seealso: https://github.com/Haivision/srt/blob/master/docs/API.md +public enum SRTRejectReason: Int, Sendable { + case unknown = 0 + case system = 1 + case peer = 2 + case resource = 3 + case rogue = 4 + case backlog = 5 + case ipe = 6 + case close = 7 + case version = 8 + case rdvcookie = 9 + case badsecret = 10 + case unsecure = 11 + case messageapi = 12 + case congestion = 13 + case filter = 14 + case group = 15 + case timeout = 16 + case crypto = 17 + + init?(socket: SRTSOCKET) { + self.init(rawValue: Int(srt_getrejectreason(socket))) + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTSession.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTSession.swift new file mode 100644 index 000000000..ea8999103 --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTSession.swift @@ -0,0 +1,102 @@ +@preconcurrency import Combine +import Foundation +import HaishinKit + +actor SRTSession: Session { + var connected: Bool { + get async { + await connection.connected + } + } + + @AsyncStreamed(.closed) + private(set) var readyState: AsyncStream + + var stream: any StreamConvertible { + _stream + } + + private let uri: URL + private let mode: SessionMode + private var retryCount: Int = 0 + private var maxRetryCount = kSession_maxRetryCount + private lazy var connection = SRTConnection() + private lazy var _stream: SRTStream = { + SRTStream(connection: connection) + }() + private var cancellables: Set = [] + private var disconnctedTask: Task? { + didSet { + oldValue?.cancel() + } + } + + init(uri: URL, mode: SessionMode, configuration: (any SessionConfiguration)?) 
{ + self.uri = uri + self.mode = mode + } + + func setMaxRetryCount(_ maxRetryCount: Int) { + self.maxRetryCount = maxRetryCount + } + + func connect(_ disconnected: @Sendable @escaping () -> Void) async throws { + guard await connection.connected == false else { + return + } + _readyState.value = .connecting + do { + try await connection.connect(uri) + } catch { + if let error = error as? SRTConnection.Error { + switch error { + case .failedToConnect(let reason): + // If the timeout has expired, there is no prospect of successfully reconnecting + // even if a retry is attempted, so no retry will be performed. + guard reason == .timeout else { + retryCount = 0 + _readyState.value = .closed + throw error + } + default: + break + } + } + guard retryCount < maxRetryCount else { + retryCount = 0 + _readyState.value = .closed + throw error + } + // It is being delayed using backoff for congestion control. + try await Task.sleep(nanoseconds: UInt64(pow(2.0, Double(retryCount))) * 1_000_000_000) + retryCount += 1 + try await connect(disconnected) + } + _readyState.value = .open + retryCount = 0 + switch mode { + case .playback: + await _stream.play() + case .publish: + await _stream.publish() + } + disconnctedTask = Task { + cancellables.removeAll() + await connection.$connected.sink { + if $0 == false { + disconnected() + } + }.store(in: &cancellables) + } + } + + func close() async throws { + guard await connection.connected else { + return + } + _readyState.value = .closing + await connection.close() + retryCount = 0 + _readyState.value = .closed + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTSessionFactory.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTSessionFactory.swift new file mode 100644 index 000000000..8cae1de1a --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTSessionFactory.swift @@ -0,0 +1,13 @@ +import Foundation +import HaishinKit + +public struct SRTSessionFactory: SessionFactory { + public 
let supportedProtocols: Set = ["srt"] + + public init() { + } + + public func make(_ uri: URL, mode: SessionMode, configuration: (any SessionConfiguration)?) -> any Session { + return SRTSession(uri: uri, mode: mode, configuration: configuration) + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTSocket.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTSocket.swift new file mode 100644 index 000000000..83d9d8223 --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTSocket.swift @@ -0,0 +1,302 @@ +import Foundation +import HaishinKit +import libsrt +import Logboard + +final actor SRTSocket { + static let payloadSize: Int = 1316 + + enum Error: Swift.Error { + case notConnected + case rejected(_ reason: SRTRejectReason) + case illegalState(_ message: String) + } + + enum Status: Int, CustomDebugStringConvertible { + case unknown + case `init` + case opened + case listening + case connecting + case connected + case broken + case closing + case closed + case nonexist + + var debugDescription: String { + switch self { + case .unknown: + return "unknown" + case .`init`: + return "init" + case .opened: + return "opened" + case .listening: + return "listening" + case .connecting: + return "connecting" + case .connected: + return "connected" + case .broken: + return "broken" + case .closing: + return "closing" + case .closed: + return "closed" + case .nonexist: + return "nonexist" + } + } + + init?(_ status: SRT_SOCKSTATUS) { + self.init(rawValue: Int(status.rawValue)) + defer { + logger.trace(debugDescription) + } + } + } + + var inputs: AsyncStream { + AsyncStream { continuation in + // If Task.detached is not used, closing will result in a deadlock. + Task.detached { + while await self.connected { + let result = await self.recvmsg() + if 0 <= result { + continuation.yield(await self.incomingBuffer.subdata(in: 0...Continuation? 
{ + didSet { + oldValue?.finish() + } + } + private var connected: Bool { + status == .connected + } + private var windowSizeC: Int32 = 1024 * 4 + private lazy var incomingBuffer: Data = .init(count: Int(windowSizeC)) + + init() { + socket = srt_create_socket() + } + + init(socket: SRTSOCKET, options: [SRTSocketOption]) async throws { + self.socket = socket + guard configure(options, restriction: .post) else { + throw makeSocketError() + } + if incomingBuffer.count < windowSizeC { + incomingBuffer = .init(count: Int(windowSizeC)) + } + } + + func getSocketOption(_ name: SRTSocketOption.Name) throws -> SRTSocketOption { + return try SRTSocketOption(name: name, socket: socket) + } + + func setSocketOption(_ option: SRTSocketOption) throws { + try option.setSockflag(socket) + } + + func open(_ url: SRTSocketURL) async throws { + if socket == SRT_INVALID_SOCK { + throw makeSocketError() + } + guard configure(url.options, restriction: .pre) else { + throw makeSocketError() + } + let status: Int32 = try { + switch url.mode { + case .caller: + guard var remote = url.remote else { + return SRT_ERROR + } + var remoteaddr = remote.makeSockaddr() + return srt_connect(socket, &remoteaddr, Int32(remote.size)) + case .listener: + guard var local = url.local else { + return SRT_ERROR + } + var localaddr = local.makeSockaddr() + let status = srt_bind(socket, &localaddr, Int32(local.size)) + guard status != SRT_ERROR else { + throw makeSocketError() + } + return srt_listen(socket, 1) + case .rendezvous: + guard var remote = url.remote, var local = url.local else { + return SRT_ERROR + } + var remoteaddr = remote.makeSockaddr() + var localaddr = local.makeSockaddr() + return srt_rendezvous(socket, &remoteaddr, Int32(remote.size), &localaddr, Int32(local.size)) + } + }() + guard status != SRT_ERROR else { + let reason = SRTRejectReason(socket: socket) ?? 
.unknown + throw Error.rejected(reason) + } + switch url.mode { + case .listener: + break + default: + guard configure(url.options, restriction: .post) else { + throw makeSocketError() + } + if incomingBuffer.count < windowSizeC { + incomingBuffer = .init(count: Int(windowSizeC)) + } + } + await startRunning() + } + + func accept(_ options: [SRTSocketOption]) async throws -> SRTSocket { + try await withCheckedThrowingContinuation { (continuation: CheckedContinuation) in + Task.detached { [self] in + do { + let accept = srt_accept(await socket, nil, nil) + guard -1 < accept else { + throw await makeSocketError() + } + let socket = try await SRTSocket(socket: accept, options: options) + await socket.startRunning() + continuation.resume(returning: socket) + } catch { + continuation.resume(throwing: error) + } + } + } + } + + func send(_ data: Data) throws { + guard connected else { + throw Error.notConnected + } + for data in data.chunk(Self.payloadSize) { + outputs?.yield(data) + } + } + + private func configure(_ options: [SRTSocketOption], restriction: SRTSocketOption.Restriction) -> Bool { + var failures: [String] = [] + for option in options where option.name.restriction == restriction { + do { + try option.setSockflag(socket) + } catch { + failures.append(option.name.rawValue) + } + } + guard failures.isEmpty else { + logger.error(failures) + return false + } + return true + } + + private func bstats() -> Int32 { + guard socket != SRT_INVALID_SOCK else { + return SRT_ERROR + } + return srt_bstats(socket, &perf, 1) + } + + private func makeSocketError() -> Error { + let error_message = String(cString: srt_getlasterror_str()) + defer { + logger.error(error_message) + } + if socket != SRT_INVALID_SOCK { + srt_close(socket) + socket = SRT_INVALID_SOCK + } + return .illegalState(error_message) + } + + @inline(__always) + private func sendmsg(_ data: Data) -> Int32 { + return data.withUnsafeBytes { pointer in + guard let buffer = 
pointer.baseAddress?.assumingMemoryBound(to: CChar.self) else { + return SRT_ERROR + } + return srt_sendmsg(socket, buffer, Int32(data.count), -1, 0) + } + } + + @inline(__always) + private func recvmsg() -> Int32 { + return incomingBuffer.withUnsafeMutableBytes { pointer in + guard let buffer = pointer.baseAddress?.assumingMemoryBound(to: CChar.self) else { + return SRT_ERROR + } + return srt_recvmsg(socket, buffer, windowSizeC) + } + } +} + +extension SRTSocket: AsyncRunner { + // MARK: AsyncRunner + func startRunning() async { + guard !isRunning else { + return + } + let stream = AsyncStream { continuation in + self.outputs = continuation + } + Task { + for await data in stream { + let result = sendmsg(data) + if result == -1 { + await stopRunning() + } + } + } + isRunning = true + } + + func stopRunning() async { + guard isRunning else { + return + } + srt_close(socket) + socket = SRT_INVALID_SOCK + outputs = nil + isRunning = false + } +} + +extension SRTSocket: NetworkTransportReporter { + // MARK: NetworkTransportReporter + func makeNetworkTransportReport() -> NetworkTransportReport { + _ = bstats() + let performanceData = self.performanceData + return .init( + queueBytesOut: Int(performanceData.byteSndBuf), + totalBytesIn: Int(performanceData.byteRecvTotal), + totalBytesOut: Int(performanceData.byteSentTotal) + ) + } + + func makeNetworkMonitor() -> NetworkMonitor { + return .init(self) + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTSocketOption.Name.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTSocketOption.Name.swift new file mode 100644 index 000000000..5b32d7ab1 --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTSocketOption.Name.swift @@ -0,0 +1,235 @@ +import Foundation +import libsrt + +extension SRTSocketOption.Name: RawRepresentable { + // MARK: RawRepresentable + public init?(rawValue: String) { + switch rawValue { + case "bindtodevice": + self = .bindtodevice + case "congestion": + 
self = .congestion + case "conntimeo": + self = .conntimeo + case "drifttrace": + self = .drifttracer + case "enforcedencryption": + self = .enforcedencryption + case "event": + self = .event + case "fc": + self = .fc + case "groupconnect": + self = .groupconnect + case "groupminstabletimeo": + self = .groupminstabletimeo + case "grouptype": + self = .grouptype + case "inputbw": + self = .inputbw + case "iptos": + self = .iptos + case "ipttl": + self = .ipttl + case "ipv6only": + self = .ipv6only + case "isn": + self = .isn + case "kmpreannounce": + self = .kmpreannounce + case "kmrefreshrate": + self = .kmrefreshrate + case "kmstate": + self = .kmstate + case "latency": + self = .latency + case "lossmaxttl": + self = .lossmaxttl + case "maxbw": + self = .maxbw + case "messageapi": + self = .messageapi + case "mininputbw": + self = .mininputbw + case "minversion": + self = .minversion + case "mss": + self = .mss + case "nakreport": + self = .nakreport + case "packetfilter": + self = .packetfilter + case "passphrase": + self = .passphrase + case "pbkeylen": + self = .pbkeylen + case "peeridletimeo": + self = .peeridletimeo + case "peerlatency": + self = .peerlatency + case "peerversion": + self = .peerversion + case "rcvsyn": + self = .rcvsyn + case "rcvtimeo": + self = .rcvtimeo + case "rendezvous": + self = .rendezvous + case "retransmitalgo": + self = .retransmitalgo + case "reuseaddr": + self = .reuseaddr + case "sender": + self = .sender + case "sndbuf": + self = .sndbuf + case "snddata": + self = .snddata + case "snddropdelay": + self = .snddropdelay + case "sndkmstate": + self = .sndkmstate + case "sndsyn": + self = .sndsyn + case "sndtimeo": + self = .sndtimeo + case "state": + self = .state + case "streamid": + self = .streamid + case "tlpktdrop": + self = .tlpktdrop + case "transtype": + self = .transtype + case "tsbpdmode": + self = .tsbpdmode + case "udp_rcvbuf": + self = .udpRcvbuf + case "udp_sndbuf": + self = .udpSndbuf + case "version": + self = 
.version + default: + return nil + } + } +} + +// https://github.com/Haivision/srt/blob/master/docs/API/API-socket-options.md#list-of-options +extension SRTSocketOption.Name { + /// An option that represents the SRTO_BINDTODEVICE. + public static let bindtodevice = Self(rawValue: "bindtodevice", symbol: SRTO_BINDTODEVICE, restriction: .preBind, type: .string) + /// An option that represents the SRTO_CONGESTION. + public static let congestion = Self(rawValue: "congestion", symbol: SRTO_CONGESTION, restriction: .pre, type: .string) + /// An option that represents the SRTO_CONNTIMEO. + public static let conntimeo = Self(rawValue: "conntimeo", symbol: SRTO_CONNTIMEO, restriction: .pre, type: .int32) + // public static let cryptomode = Self(name: "cryptomode", symbol: SRTO_CRYPTOMODE, restriction: .pre, type: .int32) + /// An option that represents the SRTO_DRIFTTRACER. + public static let drifttracer = Self(rawValue: "drifttracer", symbol: SRTO_DRIFTTRACER, restriction: .post, type: .bool) + /// An option that represents the SRTO_ENFORCEDENCRYPTION. + public static let enforcedencryption = Self(rawValue: "enforcedencryption", symbol: SRTO_ENFORCEDENCRYPTION, restriction: .pre, type: .bool) + /// An option that represents the SRTO_EVENT. + public static let event = Self(rawValue: "event", symbol: SRTO_EVENT, restriction: .none, type: .int32) + /// An option that represents the SRTO_FC. + public static let fc = Self(rawValue: "fc", symbol: SRTO_FC, restriction: .pre, type: .int32) + /// An option that represents the SRTO_GROUPCONNECT. + public static let groupconnect = Self(rawValue: "groupconnect", symbol: SRTO_GROUPCONNECT, restriction: .pre, type: .int32) + /// An option that represents the SRTO_GROUPMINSTABLETIMEO. + public static let groupminstabletimeo = Self(rawValue: "groupminstabletimeo", symbol: SRTO_GROUPMINSTABLETIMEO, restriction: .pre, type: .int32) + /// An option that represents the SRTO_GROUPTYPE. 
+ public static let grouptype = Self(rawValue: "grouptype", symbol: SRTO_GROUPTYPE, restriction: .pre, type: .int32) + /// An option that represents the SRTO_INPUTBW. + public static let inputbw = Self(rawValue: "inputbw", symbol: SRTO_INPUTBW, restriction: .post, type: .int64) + /// An option that represents the SRTO_IPTOS. + public static let iptos = Self(rawValue: "iptos", symbol: SRTO_IPTOS, restriction: .preBind, type: .int32) + /// An option that represents the SRTO_IPTTL. + public static let ipttl = Self(rawValue: "ipttl", symbol: SRTO_IPTTL, restriction: .preBind, type: .int64) + /// An option that represents the SRTO_IPV6ONLY. + public static let ipv6only = Self(rawValue: "ipv6only", symbol: SRTO_IPV6ONLY, restriction: .preBind, type: .int32) + /// An option that represents the SRTO_ISN. + public static let isn = Self(rawValue: "isn", symbol: SRTO_ISN, restriction: .none, type: .int32) + /// An option that represents the SRTO_KMPREANNOUNCE. + public static let kmpreannounce = Self(rawValue: "kmpreannounce", symbol: SRTO_KMPREANNOUNCE, restriction: .pre, type: .int32) + /// An option that represents the SRTO_KMREFRESHRATE. + public static let kmrefreshrate = Self(rawValue: "kmrefreshrate", symbol: SRTO_KMREFRESHRATE, restriction: .pre, type: .int32) + /// An option that represents the SRTO_KMSTATE. + public static let kmstate = Self(rawValue: "kmstate", symbol: SRTO_KMSTATE, restriction: .none, type: .int32) + /// An option that represents the SRTO_LATENCY. + public static let latency = Self(rawValue: "latency", symbol: SRTO_LATENCY, restriction: .pre, type: .int32) + // public static let linger = Self(name: "linger", symbol: SRTO_LINGER, restriction: .pre, type: .bool) + /// An option that represents the SRTO_LOSSMAXTTL. + public static let lossmaxttl = Self(rawValue: "lossmaxttl", symbol: SRTO_LOSSMAXTTL, restriction: .pre, type: .int32) + /// An option that represents the SRTO_MAXBW. 
+ public static let maxbw = Self(rawValue: "maxbw", symbol: SRTO_MAXBW, restriction: .post, type: .int64) + // public static let maxrexmitbw = Self(name: "maxrexmitbw", symbol: SRTO_MAXREXMITBW, restriction: .post, type: .int64) + /// An option that represents the SRTO_MESSAGEAPI. + public static let messageapi = Self(rawValue: "messageapi", symbol: SRTO_MESSAGEAPI, restriction: .pre, type: .bool) + /// An option that represents the SRTO_MININPUTBW. + public static let mininputbw = Self(rawValue: "mininputbw", symbol: SRTO_MININPUTBW, restriction: .pre, type: .int64) + /// An option that represents the SRTO_MINVERSION. + public static let minversion = Self(rawValue: "minversion", symbol: SRTO_MINVERSION, restriction: .pre, type: .int32) + /// An option that represents the SRTO_MSS. + public static let mss = Self(rawValue: "mss", symbol: SRTO_MSS, restriction: .preBind, type: .int32) + /// An option that represents the SRTO_NAKREPORT. + public static let nakreport = Self(rawValue: "nakreport", symbol: SRTO_NAKREPORT, restriction: .pre, type: .bool) + /// An option that represents the SRTO_OHEADBW. + public static let oheadbw = Self(rawValue: "oheadbw", symbol: SRTO_OHEADBW, restriction: .post, type: .int32) + /// An option that represents the SRTO_PACKETFILTER. + public static let packetfilter = Self(rawValue: "packetfilter", symbol: SRTO_PACKETFILTER, restriction: .pre, type: .string) + /// An option that represents the SRTO_PASSPHRASE. + public static let passphrase = Self(rawValue: "passphrase", symbol: SRTO_PASSPHRASE, restriction: .pre, type: .string) + /// An option that represents the SRTO_PBKEYLEN. + public static let pbkeylen = Self(rawValue: "pbkeylen", symbol: SRTO_PBKEYLEN, restriction: .pre, type: .int32) + /// An option that represents the SRTO_PEERIDLETIME. + public static let peeridletimeo = Self(rawValue: "peeridletimeo", symbol: SRTO_PEERIDLETIMEO, restriction: .pre, type: .int32) + /// An option that represents the SRTO_PEERLATENCY. 
+ public static let peerlatency = Self(rawValue: "peerlatency", symbol: SRTO_PEERLATENCY, restriction: .pre, type: .int32) + /// An option that represents the SRTO_PEERVERSION. + public static let peerversion = Self(rawValue: "peerversion", symbol: SRTO_PEERVERSION, restriction: .pre, type: .string) + /// An option that represents the SRTO_RCVBUF. + public static let rcvbuf = Self(rawValue: "rcvbuf", symbol: SRTO_RCVBUF, restriction: .preBind, type: .int32) + /// An option that represents the SRTO_RCVDATA. + public static let rcvdata = Self(rawValue: "rcvdata", symbol: SRTO_RCVDATA, restriction: .none, type: .int32) + /// An option that represents the SRTO_RCVLATENCY. + public static let rcvlatency = Self(rawValue: "rcvlatency", symbol: SRTO_RCVLATENCY, restriction: .pre, type: .int32) + /// An option that represents the SRTO_RCVSYN. + public static let rcvsyn = Self(rawValue: "rcvsyn", symbol: SRTO_RCVSYN, restriction: .post, type: .bool) + /// An option that represents the SRTO_RCVTIMEO. + public static let rcvtimeo = Self(rawValue: "rcvtimeo", symbol: SRTO_RCVTIMEO, restriction: .post, type: .int32) + /// An option that represents the SRTO_RENDEZVOUS. + public static let rendezvous = Self(rawValue: "rendezvous", symbol: SRTO_RENDEZVOUS, restriction: .pre, type: .bool) + /// An option that represents the SRTO_RETRANSMITALGO. + public static let retransmitalgo = Self(rawValue: "retransmitalgo", symbol: SRTO_RETRANSMITALGO, restriction: .pre, type: .int32) + /// An option that represents the SRTO_REUSEADDR. + public static let reuseaddr = Self(rawValue: "reuseaddr", symbol: SRTO_REUSEADDR, restriction: .preBind, type: .bool) + /// An option that represents the SRTO_SENDER. + public static let sender = Self(rawValue: "sender", symbol: SRTO_SENDER, restriction: .pre, type: .bool) + /// An option that represents the SRTO_SNDBUF. 
+ public static let sndbuf = Self(rawValue: "sndbuf", symbol: SRTO_SNDBUF, restriction: .preBind, type: .int32) + /// An option that represents the SRTO_SNDDATA. + public static let snddata = Self(rawValue: "snddata", symbol: SRTO_SNDDATA, restriction: .none, type: .int32) + /// An option that represents the SRTO_SNDDROPDELAY. + public static let snddropdelay = Self(rawValue: "snddropdelay", symbol: SRTO_SNDDROPDELAY, restriction: .post, type: .int32) + /// An option that represents the SRTO_SNDKMSTATE. + public static let sndkmstate = Self(rawValue: "sndkmstate", symbol: SRTO_SNDKMSTATE, restriction: .post, type: .int32) + /// An option that represents the SRTO_SNDSYN. + public static let sndsyn = Self(rawValue: "sndsyn", symbol: SRTO_SNDSYN, restriction: .post, type: .bool) + /// An option that represents the SRTO_SNDTIMEO. + public static let sndtimeo = Self(rawValue: "sndtimeo", symbol: SRTO_SNDTIMEO, restriction: .post, type: .int32) + /// An option that represents the SRTO_STATE. + public static let state = Self(rawValue: "state", symbol: SRTO_STATE, restriction: .none, type: .int32) + /// An option that represents the SRTO_STREAMID. + public static let streamid = Self(rawValue: "streamid", symbol: SRTO_STREAMID, restriction: .pre, type: .string) + /// An option that represents the SRTO_TLPKTDROP. + public static let tlpktdrop = Self(rawValue: "tlpktdrop", symbol: SRTO_TLPKTDROP, restriction: .pre, type: .bool) + /// An option that represents the SRTO_TRANSTYPE. + public static let transtype = Self(rawValue: "transtype", symbol: SRTO_TRANSTYPE, restriction: .pre, type: .int32) + /// An option that represents the SRTO_TSBPDMODE. + public static let tsbpdmode = Self(rawValue: "tsbpdmode", symbol: SRTO_TSBPDMODE, restriction: .pre, type: .bool) + /// An option that represents the SRTO_UDP_RCVBUF. 
+ public static let udpRcvbuf = Self(rawValue: "udp_rcvbuf", symbol: SRTO_UDP_RCVBUF, restriction: .preBind, type: .int32) + /// An option that represents the SRTO_UDP_SNDBUF. + public static let udpSndbuf = Self(rawValue: "udp_sndbuf", symbol: SRTO_UDP_SNDBUF, restriction: .preBind, type: .int32) + /// An option that represents the SRTO_VERSION. + public static let version = Self(rawValue: "version", symbol: SRTO_VERSION, restriction: .none, type: .int32) +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTSocketOption.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTSocketOption.swift new file mode 100644 index 000000000..2c308f50c --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTSocketOption.swift @@ -0,0 +1,219 @@ +import Foundation +import libsrt + +/// A structure that represents a Key-Value-Object for the SRTSocket. +public struct SRTSocketOption: Sendable { + /// The error domain codes. + public enum Error: Swift.Error { + case invalidOption(_ message: String) + case invalidArgument(_ message: String) + } + + private static let trueStringLiterals: [String: Bool] = [ + "1": true, + "on": true, + "yes": true, + "true": true + ] + + private static let falseStringLiterals: [String: Bool] = [ + "0": false, + "off": false, + "no": false, + "false": false + ] + + /// Constants that indicate the sockopt c-types. + public enum CType: Int, Sendable { + case string + case int32 + case int64 + case bool + + var size: Int { + switch self { + case .string: + return 512 + case .int32: + return MemoryLayout.size + case .int64: + return MemoryLayout.size + case .bool: + return MemoryLayout.size + } + } + + func data(_ value: any Sendable) throws -> Data { + switch self { + case .string: + guard let data = String(describing: value).data(using: .utf8) else { + throw Error.invalidArgument("\(value)") + } + return data + case .bool: + guard var value = value as? 
Bool else { + throw Error.invalidArgument("\(value)") + } + return .init(bytes: &value, count: size) + case .int32: + guard var value = value as? Int32 else { + throw Error.invalidArgument("\(value)") + } + return .init(bytes: &value, count: size) + case .int64: + guard var value = value as? Int64 else { + throw Error.invalidArgument("\(value)") + } + return .init(bytes: &value, count: size) + } + } + } + + /// Constants that indicate the sockopt binding timings. + public enum Restriction: Int, Sendable { + case preBind + case pre + case post + } + + /// A structure that defines the name of a SRTSocket option. + public struct Name: Sendable { + public typealias RawValue = String + + public let rawValue: String + let symbol: SRT_SOCKOPT + let restriction: Restriction? + let type: CType + + public init(rawValue: String, symbol: SRT_SOCKOPT, restriction: Restriction?, type: CType) { + self.rawValue = rawValue + self.symbol = symbol + self.restriction = restriction + self.type = type + } + } + + /// The socket option's name. + public let name: Name + + /// The socket option's value expressed as a String value. + public var stringValue: String { + switch name.type { + case .string: + return String(data: data, encoding: .utf8) ?? "" + case .int32: + return data.withUnsafeBytes { $0.load(as: Int32.self) }.description + case .int64: + return data.withUnsafeBytes { $0.load(as: Int64.self) }.description + case .bool: + return (data[0] == 1).description + } + } + + /// The socket option's value expressed as a Int value. + public var intValue: Int { + switch name.type { + case .string: + return -1 + case .int32: + return Int(data.withUnsafeBytes { $0.load(as: Int32.self) }) + case .int64: + return Int(data.withUnsafeBytes { $0.load(as: Int64.self) }) + case .bool: + return Int(data[0]) + } + } + + /// The socket option's value expressed as a Boolean value. 
+ public var boolValue: Bool { + switch name.type { + case .string: + return false + case .int32: + return Int(data.withUnsafeBytes { $0.load(as: Int32.self) }) == 1 + case .int64: + return Int(data.withUnsafeBytes { $0.load(as: Int64.self) }) == 1 + case .bool: + return data[0] == 1 + } + } + + private let data: Data + + /// Creates an option. + public init(name: Name, value: String) throws { + self.name = name + switch name.type { + case .string: + self.data = try name.type.data(value) + case .int32: + switch name.rawValue { + case "transtype": + switch value { + case "live": + self.data = try name.type.data(Int32(SRTT_LIVE.rawValue)) + case "file": + self.data = try name.type.data(Int32(SRTT_FILE.rawValue)) + default: + throw Error.invalidOption(name.rawValue) + } + default: + self.data = try name.type.data(Int32(value)) + } + case .int64: + self.data = try name.type.data(Int64(value)) + case .bool: + let key = String(describing: value).lowercased() + if let bool = Self.trueStringLiterals[key] { + self.data = try name.type.data(bool) + } else if let bool = Self.falseStringLiterals[key] { + self.data = try name.type.data(bool) + } else { + throw Error.invalidOption(name.rawValue) + } + } + } + + /// Creates an option. 
+ public init(name: Name, value: Int) throws { + self.name = name + switch name.type { + case .string: + self.data = try name.type.data(value.description) + case .int32: + self.data = try name.type.data(Int32(value)) + case .int64: + self.data = try name.type.data(Int64(value)) + case .bool: + self.data = try name.type.data(value == 1) + } + } + + init(name: Name, socket: SRTSOCKET) throws { + self.name = name + var data = Data(repeating: 0, count: name.type.size) + var length = Int32(name.type.size) + let result: Int32 = data.withUnsafeMutableBytes { + guard let buffer = $0.baseAddress else { + return -1 + } + return srt_getsockflag(socket, name.symbol, buffer, &length) + } + if result < 0 { + throw Error.invalidOption(String(cString: srt_getlasterror_str())) + } + self.data = data.subdata(in: 0.. [String: String] { + let url = url.absoluteString + if !url.contains("?") { + return [:] + } + let queryString = url.split(separator: "?")[1] + let queries = queryString.split(separator: "&") + var paramsReturn: [String: String] = [:] + for q in queries { + let query = q.split(separator: "=", maxSplits: 1) + if query.count == 2 { + paramsReturn[String(query[0])] = String(query[1]) + } + } + return paramsReturn + } + + let url: URL + let mode: SRTMode + let options: [SRTSocketOption] + + var remote: sockaddr_in? { + guard let host = url.host else { + return nil + } + return .init(host, port: url.port ?? Self.defaultPort) + } + + var local: sockaddr_in? { + let queryItems = Self.getQueryItems(url) + let adapter = queryItems["adapter"] ?? "0.0.0.0" + if let port = queryItems["port"] { + return .init(adapter, port: Int(port) ?? url.port ?? Self.defaultPort) + } + return .init(adapter, port: url.port ?? Self.defaultPort) + } + + init?(_ url: URL?) 
{ + guard let url, let scheme = url.scheme, scheme == "srt" else { + return nil + } + let queryItems = Self.getQueryItems(url) + var options: [SRTSocketOption] = [] + for item in queryItems { + guard let name = SRTSocketOption.Name(rawValue: item.key) else { + continue + } + if let option = try? SRTSocketOption(name: name, value: item.value) { + options.append(option) + } + } + self.url = url + self.mode = { + switch queryItems["mode"] { + case "client", "caller": + return .caller + case "server", "listener": + return .listener + case "rendezvous": + return .rendezvous + default: + if queryItems["adapter"] != nil { + return .rendezvous + } + if url.host?.isEmpty == true { + return .listener + } + return .caller + } + }() + self.options = options + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTStream.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTStream.swift new file mode 100644 index 000000000..6ca6807ca --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/SRT/SRTStream.swift @@ -0,0 +1,212 @@ +@preconcurrency import AVFoundation +import Combine +import Foundation +import HaishinKit +import libsrt + +/// An actor that provides the interface to control a one-way channel over a SRTConnection. +public actor SRTStream { + static let supportedAudioCodecs: [AudioCodecSettings.Format] = [.aac] + static let supportedVideoCodecs: [VideoCodecSettings.Format] = VideoCodecSettings.Format.allCases + + /// The expected medias for transport stream. + public var expectedMedias: Set { + writer.expectedMedias + } + + @Published public private(set) var readyState: StreamReadyState = .idle + public private(set) var videoTrackId: UInt8? = UInt8.max + public private(set) var audioTrackId: UInt8? = UInt8.max + package var outputs: [any StreamOutput] = [] + package var bitRateStrategy: (any StreamBitRateStrategy)? 
+ private lazy var writer = TSWriter() + private lazy var reader = TSReader() + package lazy var incoming = IncomingStream(self) + package lazy var outgoing = OutgoingStream() + private weak var connection: SRTConnection? + + /// The error domain codes. + public enum Error: Swift.Error { + // An unsupported codec. + case unsupportedCodec + } + + /// Creates a new stream object. + public init(connection: SRTConnection) { + self.connection = connection + Task { await connection.addStream(self) } + } + + deinit { + outputs.removeAll() + } + + /// Sends streaming audio and video from client. + /// + /// - Warning: As a prerequisite, SRTConnection must be connected. In the future, an exception will be thrown. + public func publish(_ name: String? = "") async { + guard let connection, await connection.connected else { + return + } + guard name != nil else { + switch readyState { + case .publishing: + await close() + default: + break + } + return + } + readyState = .publishing + outgoing.startRunning() + if outgoing.videoInputFormat != nil { + writer.expectedMedias.insert(.video) + } + if outgoing.audioInputFormat != nil { + writer.expectedMedias.insert(.audio) + } + if writer.expectedMedias.isEmpty { + logger.error("Please set expected media.") + } + Task { + for await buffer in outgoing.videoOutputStream { + append(buffer) + } + } + Task { + for await buffer in outgoing.audioOutputStream { + append(buffer.0, when: buffer.1) + } + } + Task { + for await buffer in outgoing.videoInputStream { + outgoing.append(video: buffer) + } + } + Task { + for await data in writer.output { + await connection.send(data) + } + } + } + + /// Playback streaming audio and video from server. + /// + /// - Warning: As a prerequisite, SRTConnection must be connected. In the future, an exception will be thrown. + public func play(_ name: String? 
= "") async { + guard let connection, await connection.connected else { + return + } + guard name != nil else { + switch readyState { + case .playing: + await close() + default: + break + } + return + } + await connection.recv() + Task { + await incoming.startRunning() + for await buffer in reader.output { + await incoming.append(buffer.1) + } + } + readyState = .playing + } + + /// Stops playing or publishing and makes available other uses. + public func close() async { + guard readyState != .idle else { + return + } + writer.clear() + reader.clear() + outgoing.stopRunning() + Task { await incoming.stopRunning() } + readyState = .idle + } + + /// Sets the expected media. + /// + /// This sets whether the stream contains audio only, video only, or both. Normally, this is automatically set through the append method. + /// If you cannot call the append method before publishing, please use this method to explicitly specify the contents of the stream. + public func setExpectedMedias(_ expectedMedias: Set) { + writer.expectedMedias = expectedMedias + } + + func doInput(_ data: Data) { + _ = reader.read(data) + } +} + +extension SRTStream: _Stream { + public func setAudioSettings(_ audioSettings: AudioCodecSettings) throws { + guard Self.supportedAudioCodecs.contains(audioSettings.format) else { + throw Error.unsupportedCodec + } + outgoing.audioSettings = audioSettings + } + + public func setVideoSettings(_ videoSettings: VideoCodecSettings) throws { + guard Self.supportedVideoCodecs.contains(videoSettings.format) else { + throw Error.unsupportedCodec + } + outgoing.videoSettings = videoSettings + } + + public func append(_ sampleBuffer: CMSampleBuffer) { + switch sampleBuffer.formatDescription?.mediaType { + case .video: + if sampleBuffer.formatDescription?.isCompressed == true { + writer.videoFormat = sampleBuffer.formatDescription + writer.append(sampleBuffer) + } else { + outgoing.append(sampleBuffer) + outputs.forEach { $0.stream(self, didOutput: sampleBuffer) } + 
} + default: + break + } + } + + public func append(_ audioBuffer: AVAudioBuffer, when: AVAudioTime) { + switch audioBuffer { + case let audioBuffer as AVAudioPCMBuffer: + outgoing.append(audioBuffer, when: when) + outputs.forEach { $0.stream(self, didOutput: audioBuffer, when: when) } + case let audioBuffer as AVAudioCompressedBuffer: + writer.audioFormat = audioBuffer.format + writer.append(audioBuffer, when: when) + default: + break + } + } + + public func dispatch(_ event: NetworkMonitorEvent) async { + await bitRateStrategy?.adjustBitrate(event, stream: self) + } +} + +extension SRTStream: MediaMixerOutput { + // MARK: MediaMixerOutput + public func selectTrack(_ id: UInt8?, mediaType: CMFormatDescription.MediaType) { + switch mediaType { + case .audio: + audioTrackId = id + case .video: + videoTrackId = id + default: + break + } + } + + nonisolated public func mixer(_ mixer: MediaMixer, didOutput sampleBuffer: CMSampleBuffer) { + Task { await append(sampleBuffer) } + } + + nonisolated public func mixer(_ mixer: MediaMixer, didOutput buffer: AVAudioPCMBuffer, when: AVAudioTime) { + Task { await append(buffer, when: when) } + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/TS/ADTSReader.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/TS/ADTSReader.swift new file mode 100644 index 000000000..5acaacd17 --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/TS/ADTSReader.swift @@ -0,0 +1,35 @@ +import Foundation +import HaishinKit + +class ADTSReader: Sequence { + private var data: Data = .init() + + func read(_ data: Data) { + self.data = data + } + + func makeIterator() -> ADTSReaderIterator { + return ADTSReaderIterator(data: data) + } +} + +struct ADTSReaderIterator: IteratorProtocol { + private let data: Data + private var cursor: Int = 0 + private var header: ADTSHeader = .init() + + init(data: Data) { + self.data = data + } + + mutating func next() -> Int? 
{ + guard cursor < data.count else { + return nil + } + header.data = data.advanced(by: cursor) + defer { + cursor += Int(header.aacFrameLength) + } + return Int(header.aacFrameLength) + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/TS/ByteArray.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/TS/ByteArray.swift new file mode 100644 index 000000000..a6aa59403 --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/TS/ByteArray.swift @@ -0,0 +1,398 @@ +import Foundation + +protocol ByteArrayConvertible { + var data: Data { get } + var length: Int { get set } + var position: Int { get set } + var bytesAvailable: Int { get } + + subscript(i: Int) -> UInt8 { get set } + + @discardableResult + func writeUInt8(_ value: UInt8) -> Self + func readUInt8() throws -> UInt8 + + @discardableResult + func writeInt8(_ value: Int8) -> Self + func readInt8() throws -> Int8 + + @discardableResult + func writeUInt16(_ value: UInt16) -> Self + func readUInt16() throws -> UInt16 + + @discardableResult + func writeInt16(_ value: Int16) -> Self + func readInt16() throws -> Int16 + + @discardableResult + func writeUInt24(_ value: UInt32) -> Self + func readUInt24() throws -> UInt32 + + @discardableResult + func writeUInt32(_ value: UInt32) -> Self + func readUInt32() throws -> UInt32 + + @discardableResult + func writeInt32(_ value: Int32) -> Self + func readInt32() throws -> Int32 + + @discardableResult + func writeUInt64(_ value: UInt64) -> Self + func readUInt64() throws -> UInt64 + + @discardableResult + func writeInt64(_ value: Int64) -> Self + func readInt64() throws -> Int64 + + @discardableResult + func writeDouble(_ value: Double) -> Self + func readDouble() throws -> Double + + @discardableResult + func writeFloat(_ value: Float) -> Self + func readFloat() throws -> Float + + @discardableResult + func writeUTF8(_ value: String) throws -> Self + func readUTF8() throws -> String + + @discardableResult + func writeUTF8Bytes(_ value: String) -> Self 
+ func readUTF8Bytes(_ length: Int) throws -> String + + @discardableResult + func writeBytes(_ value: Data) -> Self + func readBytes(_ length: Int) throws -> Data + + @discardableResult + func clear() -> Self +} + +// MARK: - +/** + * The ByteArray class provides methods and properties the reading or writing with binary data. + */ +class ByteArray: ByteArrayConvertible { + static let fillZero: [UInt8] = [0x00] + + static let sizeOfInt8: Int = 1 + static let sizeOfInt16: Int = 2 + static let sizeOfInt24: Int = 3 + static let sizeOfInt32: Int = 4 + static let sizeOfFloat: Int = 4 + static let sizeOfInt64: Int = 8 + static let sizeOfDouble: Int = 8 + + /** + * The ByteArray error domain codes. + */ + enum Error: Swift.Error { + /// Error cause end of data. + case eof + /// Failed to parse + case parse + } + + /// Creates an empty ByteArray. + init() { + } + + /// Creates a ByteArray with data. + init(data: Data) { + self.data = data + } + + private(set) var data = Data() + + /// Specifies the length of buffer. + var length: Int { + get { + data.count + } + set { + switch true { + case (data.count < newValue): + data.append(Data(count: newValue - data.count)) + case (newValue < data.count): + data = data.subdata(in: 0.. UInt8 { + get { + data[i] + } + set { + data[i] = newValue + } + } + + /// Reading an UInt8 value. + func readUInt8() throws -> UInt8 { + guard ByteArray.sizeOfInt8 <= bytesAvailable else { + throw ByteArray.Error.eof + } + defer { + position += 1 + } + return data[position] + } + + /// Writing an UInt8 value. + @discardableResult + func writeUInt8(_ value: UInt8) -> Self { + writeBytes(value.data) + } + + /// Readning an Int8 value. + func readInt8() throws -> Int8 { + guard ByteArray.sizeOfInt8 <= bytesAvailable else { + throw ByteArray.Error.eof + } + defer { + position += 1 + } + return Int8(bitPattern: UInt8(data[position])) + } + + /// Writing an Int8 value. 
+ @discardableResult + func writeInt8(_ value: Int8) -> Self { + writeBytes(UInt8(bitPattern: value).data) + } + + /// Readning an UInt16 value. + func readUInt16() throws -> UInt16 { + guard ByteArray.sizeOfInt16 <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += ByteArray.sizeOfInt16 + return UInt16(data: data[position - ByteArray.sizeOfInt16.. Self { + writeBytes(value.bigEndian.data) + } + + /// Reading an Int16 value. + func readInt16() throws -> Int16 { + guard ByteArray.sizeOfInt16 <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += ByteArray.sizeOfInt16 + return Int16(data: data[position - ByteArray.sizeOfInt16.. Self { + writeBytes(value.bigEndian.data) + } + + /// Reading an UInt24 value. + func readUInt24() throws -> UInt32 { + guard ByteArray.sizeOfInt24 <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += ByteArray.sizeOfInt24 + return UInt32(data: ByteArray.fillZero + data[position - ByteArray.sizeOfInt24.. Self { + writeBytes(value.bigEndian.data.subdata(in: 1.. UInt32 { + guard ByteArray.sizeOfInt32 <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += ByteArray.sizeOfInt32 + return UInt32(data: data[position - ByteArray.sizeOfInt32.. Self { + writeBytes(value.bigEndian.data) + } + + /// Reading an Int32 value. + func readInt32() throws -> Int32 { + guard ByteArray.sizeOfInt32 <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += ByteArray.sizeOfInt32 + return Int32(data: data[position - ByteArray.sizeOfInt32.. Self { + writeBytes(value.bigEndian.data) + } + + /// Writing an UInt64 value. + @discardableResult + func writeUInt64(_ value: UInt64) -> Self { + writeBytes(value.bigEndian.data) + } + + /// Reading an UInt64 value. + func readUInt64() throws -> UInt64 { + guard ByteArray.sizeOfInt64 <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += ByteArray.sizeOfInt64 + return UInt64(data: data[position - ByteArray.sizeOfInt64.. 
Self { + writeBytes(value.bigEndian.data) + } + + /// Reading an Int64 value. + func readInt64() throws -> Int64 { + guard ByteArray.sizeOfInt64 <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += ByteArray.sizeOfInt64 + return Int64(data: data[position - ByteArray.sizeOfInt64.. Double { + guard ByteArray.sizeOfDouble <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += ByteArray.sizeOfDouble + return Double(data: Data(data.subdata(in: position - ByteArray.sizeOfDouble.. Self { + writeBytes(Data(value.data.reversed())) + } + + /// Reading a Float value. + func readFloat() throws -> Float { + guard ByteArray.sizeOfFloat <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += ByteArray.sizeOfFloat + return Float(data: Data(data.subdata(in: position - ByteArray.sizeOfFloat.. Self { + writeBytes(Data(value.data.reversed())) + } + + /// Reading a string as UTF8 value. + func readUTF8() throws -> String { + try readUTF8Bytes(Int(try readUInt16())) + } + + /// Writing a string as UTF8 value. + @discardableResult + func writeUTF8(_ value: String) throws -> Self { + let utf8 = Data(value.utf8) + return writeUInt16(UInt16(utf8.count)).writeBytes(utf8) + } + + /// Clear the buffer. + @discardableResult + func clear() -> Self { + position = 0 + data.removeAll() + return self + } + + func readUTF8Bytes(_ length: Int) throws -> String { + guard length <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += length + + guard let result = String(data: data.subdata(in: position - length.. Self { + writeBytes(Data(value.utf8)) + } + + func readBytes(_ length: Int) throws -> Data { + guard length <= bytesAvailable else { + throw ByteArray.Error.eof + } + position += length + return data.subdata(in: position - length.. Self { + if position == data.count { + data.append(value) + position = data.count + return self + } + let length: Int = min(data.count, value.count) + data[position.. 
Void)) { + let r: Int = (data.count - position) % length + for index in stride(from: data.startIndex.advanced(by: position), to: data.endIndex.advanced(by: -r), by: length) { + lambda(ByteArray(data: data.subdata(in: index.. [UInt32] { + let size: Int = MemoryLayout.size + if (data.endIndex - position) % size != 0 { + return [] + } + var result: [UInt32] = [] + for index in stride(from: data.startIndex.advanced(by: position), to: data.endIndex, by: size) { + result.append(UInt32(data: data[index.. UInt32 { + calculate(data, seed: nil) + } + + func calculate(_ data: Data, seed: UInt32?) -> UInt32 { + var crc: UInt32 = seed ?? 0xffffffff + for i in 0..> 24) ^ (UInt32(data[i]) & 0xff) & 0xff)] + } + return crc + } +} + +extension CRC32: CustomDebugStringConvertible { + // MARK: CustomDebugStringConvertible + var debugDescription: String { + Mirror(reflecting: self).debugDescription + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/TS/ESSpecificData.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/TS/ESSpecificData.swift new file mode 100644 index 000000000..f03958eb5 --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/TS/ESSpecificData.swift @@ -0,0 +1,76 @@ +import CoreMedia +import Foundation +import HaishinKit + +enum ESStreamType: UInt8 { + case unspecific = 0x00 + case mpeg1Video = 0x01 + case mpeg2Video = 0x02 + case mpeg1Audio = 0x03 + case mpeg2Audio = 0x04 + case mpeg2TabledData = 0x05 + case mpeg2PacketizedData = 0x06 + + case adtsAac = 0x0F + case h263 = 0x10 + + case h264 = 0x1B + case h265 = 0x24 + + var headerSize: Int { + switch self { + case .adtsAac: + return 7 + default: + return 0 + } + } +} + +struct ESSpecificData: Equatable { + static let fixedHeaderSize: Int = 5 + + var streamType: ESStreamType = .unspecific + var elementaryPID: UInt16 = 0 + var esInfoLength: UInt16 = 0 + var esDescriptors = Data() + + init() { + } + + init?(_ data: Data) { + self.data = data + } +} + +extension ESSpecificData: DataConvertible 
{ + // MARK: DataConvertible + var data: Data { + get { + ByteArray() + .writeUInt8(streamType.rawValue) + .writeUInt16(elementaryPID | 0xe000) + .writeUInt16(esInfoLength | 0xf000) + .writeBytes(esDescriptors) + .data + } + set { + let buffer = ByteArray(data: newValue) + do { + streamType = ESStreamType(rawValue: try buffer.readUInt8()) ?? .unspecific + elementaryPID = try buffer.readUInt16() & 0x0fff + esInfoLength = try buffer.readUInt16() & 0x01ff + esDescriptors = try buffer.readBytes(Int(esInfoLength)) + } catch { + logger.error("\(buffer)") + } + } + } +} + +extension ESSpecificData: CustomDebugStringConvertible { + // MARK: CustomDebugStringConvertible + var debugDescription: String { + Mirror(reflecting: self).debugDescription + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/TS/PacketizedElementaryStream.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/TS/PacketizedElementaryStream.swift new file mode 100644 index 000000000..f4cdce892 --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/TS/PacketizedElementaryStream.swift @@ -0,0 +1,369 @@ +import AVFoundation +import CoreMedia +import HaishinKit + +/** + - seealso: https://en.wikipedia.org/wiki/Packetized_elementary_stream + */ +protocol PESPacketHeader { + var startCode: Data { get set } + var streamID: UInt8 { get set } + var packetLength: UInt16 { get set } + var optionalPESHeader: PESOptionalHeader? 
{ get set } + var data: Data { get set } +} + +// MARK: - +enum PESPTSDTSIndicator: UInt8 { + case none = 0 + case forbidden = 1 + case onlyPTS = 2 + case bothPresent = 3 +} + +// MARK: - +struct PESOptionalHeader { + static let fixedSectionSize: Int = 3 + static let defaultMarkerBits: UInt8 = 2 + static let offset = CMTime(value: 3, timescale: 30) + + var markerBits: UInt8 = PESOptionalHeader.defaultMarkerBits + var scramblingControl: UInt8 = 0 + var priority = false + var dataAlignmentIndicator = false + var copyright = false + var originalOrCopy = false + var ptsDtsIndicator: UInt8 = PESPTSDTSIndicator.none.rawValue + var esCRFlag = false + var esRateFlag = false + var dsmTrickModeFlag = false + var additionalCopyInfoFlag = false + var crcFlag = false + var extentionFlag = false + var pesHeaderLength: UInt8 = 0 + var optionalFields = Data() + var stuffingBytes = Data() + + init() { + } + + init?(data: Data) { + self.data = data + } + + mutating func setTimestamp(_ timestamp: CMTime, presentationTimeStamp: CMTime, decodeTimeStamp: CMTime) { + let base = Double(timestamp.seconds) + if presentationTimeStamp != CMTime.invalid { + ptsDtsIndicator |= 0x02 + } + if decodeTimeStamp != CMTime.invalid { + ptsDtsIndicator |= 0x01 + } + if (ptsDtsIndicator & 0x02) == 0x02 { + let pts = Int64((presentationTimeStamp.seconds + Self.offset.seconds - base) * Double(TSTimestamp.resolution)) + optionalFields += TSTimestamp.encode(pts, ptsDtsIndicator << 4) + } + if (ptsDtsIndicator & 0x01) == 0x01 { + let dts = Int64((decodeTimeStamp.seconds - base) * Double(TSTimestamp.resolution)) + optionalFields += TSTimestamp.encode(dts, 0x01 << 4) + } + pesHeaderLength = UInt8(optionalFields.count) + } + + func makeSampleTimingInfo(_ previousPresentationTimeStamp: CMTime) -> CMSampleTimingInfo? 
{ + var presentationTimeStamp: CMTime = .invalid + var decodeTimeStamp: CMTime = .invalid + if ptsDtsIndicator & 0x02 == 0x02 { + let pts = TSTimestamp.decode(optionalFields, offset: 0) + presentationTimeStamp = .init(value: pts, timescale: CMTimeScale(TSTimestamp.resolution)) + } + if ptsDtsIndicator & 0x01 == 0x01 { + let dts = TSTimestamp.decode(optionalFields, offset: TSTimestamp.dataSize) + decodeTimeStamp = .init(value: dts, timescale: CMTimeScale(TSTimestamp.resolution)) + } + return CMSampleTimingInfo( + duration: presentationTimeStamp - previousPresentationTimeStamp, + presentationTimeStamp: presentationTimeStamp, + decodeTimeStamp: decodeTimeStamp + ) + } +} + +extension PESOptionalHeader: DataConvertible { + // MARK: DataConvertible + var data: Data { + get { + var bytes = Data([0x00, 0x00]) + bytes[0] |= markerBits << 6 + bytes[0] |= scramblingControl << 4 + bytes[0] |= (priority ? 1 : 0) << 3 + bytes[0] |= (dataAlignmentIndicator ? 1 : 0) << 2 + bytes[0] |= (copyright ? 1 : 0) << 1 + bytes[0] |= (originalOrCopy ? 1 : 0) + bytes[1] |= ptsDtsIndicator << 6 + bytes[1] |= (esCRFlag ? 1 : 0) << 5 + bytes[1] |= (esRateFlag ? 1 : 0) << 4 + bytes[1] |= (dsmTrickModeFlag ? 1 : 0) << 3 + bytes[1] |= (additionalCopyInfoFlag ? 1 : 0) << 2 + bytes[1] |= (crcFlag ? 1 : 0) << 1 + bytes[1] |= extentionFlag ? 
1 : 0 + return ByteArray() + .writeBytes(bytes) + .writeUInt8(pesHeaderLength) + .writeBytes(optionalFields) + .writeBytes(stuffingBytes) + .data + } + set { + let buffer = ByteArray(data: newValue) + do { + let bytes: Data = try buffer.readBytes(PESOptionalHeader.fixedSectionSize) + markerBits = (bytes[0] & 0b11000000) >> 6 + scramblingControl = bytes[0] & 0b00110000 >> 4 + priority = (bytes[0] & 0b00001000) == 0b00001000 + dataAlignmentIndicator = (bytes[0] & 0b00000100) == 0b00000100 + copyright = (bytes[0] & 0b00000010) == 0b00000010 + originalOrCopy = (bytes[0] & 0b00000001) == 0b00000001 + ptsDtsIndicator = (bytes[1] & 0b11000000) >> 6 + esCRFlag = (bytes[1] & 0b00100000) == 0b00100000 + esRateFlag = (bytes[1] & 0b00010000) == 0b00010000 + dsmTrickModeFlag = (bytes[1] & 0b00001000) == 0b00001000 + additionalCopyInfoFlag = (bytes[1] & 0b00000100) == 0b00000100 + crcFlag = (bytes[1] & 0b00000010) == 0b00000010 + extentionFlag = (bytes[1] & 0b00000001) == 0b00000001 + pesHeaderLength = bytes[2] + optionalFields = try buffer.readBytes(Int(pesHeaderLength)) + } catch { + logger.error("\(buffer)") + } + } + } +} + +extension PESOptionalHeader: CustomDebugStringConvertible { + // MARK: CustomDebugStringConvertible + var debugDescription: String { + Mirror(reflecting: self).debugDescription + } +} + +// MARK: - +struct PacketizedElementaryStream: PESPacketHeader { + static let untilPacketLengthSize: Int = 6 + static let startCode = Data([0x00, 0x00, 0x01]) + + var startCode: Data = PacketizedElementaryStream.startCode + var streamID: UInt8 = 0 + var packetLength: UInt16 = 0 + var optionalPESHeader: PESOptionalHeader? + var data = Data() + + var payload: Data { + get { + ByteArray() + .writeBytes(startCode) + .writeUInt8(streamID) + .writeUInt16(packetLength) + .writeBytes(optionalPESHeader?.data ?? 
Data()) + .writeBytes(data) + .data + } + set { + let buffer = ByteArray(data: newValue) + do { + startCode = try buffer.readBytes(3) + streamID = try buffer.readUInt8() + packetLength = try buffer.readUInt16() + optionalPESHeader = PESOptionalHeader(data: try buffer.readBytes(buffer.bytesAvailable)) + if let optionalPESHeader: PESOptionalHeader = optionalPESHeader { + buffer.position = PacketizedElementaryStream.untilPacketLengthSize + 3 + Int(optionalPESHeader.pesHeaderLength) + } else { + buffer.position = PacketizedElementaryStream.untilPacketLengthSize + } + data = try buffer.readBytes(buffer.bytesAvailable) + } catch { + logger.error("\(buffer)") + } + } + } + + var isEntired: Bool { + if 0 < packetLength { + return data.count == packetLength - 8 + } + return false + } + + init?(_ payload: Data) { + self.payload = payload + if startCode != PacketizedElementaryStream.startCode { + return nil + } + } + + init?(_ sampleBuffer: CMSampleBuffer?, timeStamp: CMTime) { + guard let sampleBuffer, let dataBuffer = sampleBuffer.dataBuffer else { + return nil + } + switch sampleBuffer.formatDescription?.mediaSubType { + case .h264: + if !sampleBuffer.isNotSync { + data.append(contentsOf: [0x00, 0x00, 0x00, 0x01, 0x09, 0x10]) + sampleBuffer.formatDescription?.parameterSets.forEach { + data.append(contentsOf: [0x00, 0x00, 0x00, 0x01]) + data.append(contentsOf: $0) + } + } else { + data.append(contentsOf: [0x00, 0x00, 0x00, 0x01, 0x09, 0x30]) + } + if let dataBytes = try? dataBuffer.dataBytes() { + let stream = ISOTypeBufferUtil(data: dataBytes) + data.append(stream.toByteStream()) + } + case .hevc: + if !sampleBuffer.isNotSync { + sampleBuffer.formatDescription?.parameterSets.forEach { + data.append(contentsOf: [0x00, 0x00, 0x00, 0x01]) + data.append(contentsOf: $0) + } + } + if let dataBytes = try? 
dataBuffer.dataBytes() { + let stream = ISOTypeBufferUtil(data: dataBytes) + data.append(stream.toByteStream()) + } + default: + return nil + } + optionalPESHeader = PESOptionalHeader() + optionalPESHeader?.dataAlignmentIndicator = true + optionalPESHeader?.setTimestamp( + timeStamp, + presentationTimeStamp: sampleBuffer.presentationTimeStamp, + decodeTimeStamp: sampleBuffer.decodeTimeStamp + ) + let length = data.count + (optionalPESHeader?.data.count ?? 0) + if length < Int(UInt16.max) { + packetLength = UInt16(length) + } else { + // any length. https://en.wikipedia.org/wiki/Packetized_elementary_stream + packetLength = 0 + } + } + + init?(_ audioCompressedBuffer: AVAudioCompressedBuffer?, when: AVAudioTime, timeStamp: CMTime) { + guard let audioCompressedBuffer else { + return nil + } + data = .init(count: Int(audioCompressedBuffer.byteLength) + AudioSpecificConfig.adtsHeaderSize) + audioCompressedBuffer.encode(to: &data) + optionalPESHeader = PESOptionalHeader() + optionalPESHeader?.dataAlignmentIndicator = true + optionalPESHeader?.setTimestamp( + timeStamp, + presentationTimeStamp: when.makeTime(), + decodeTimeStamp: .invalid + ) + let length = data.count + (optionalPESHeader?.data.count ?? 0) + if length < Int(UInt16.max) { + packetLength = UInt16(length) + } else { + return nil + } + } + + func arrayOfPackets(_ PID: UInt16, PCR: UInt64?) 
-> [TSPacket] { + let payload = self.payload + var packets: [TSPacket] = [] + + // start + var packet = TSPacket() + packet.pid = PID + if let PCR { + packet.adaptationFieldFlag = true + packet.adaptationField = TSAdaptationField() + packet.adaptationField?.pcrFlag = true + packet.adaptationField?.pcr = TSProgramClockReference.encode(PCR, 0) + packet.adaptationField?.compute() + } + packet.payloadUnitStartIndicator = true + let position = packet.fill(payload, useAdaptationField: true) + packets.append(packet) + + // middle + let r = (payload.count - position) % 184 + for index in stride(from: payload.startIndex.advanced(by: position), to: payload.endIndex.advanced(by: -r), by: 184) { + var packet = TSPacket() + packet.pid = PID + packet.payloadFlag = true + packet.payload = payload.subdata(in: index.. Int { + self.data.append(data) + return data.count + } + + mutating func makeSampleBuffer(_ streamType: ESStreamType, previousPresentationTimeStamp: CMTime, formatDescription: CMFormatDescription?) -> CMSampleBuffer? { + var blockBuffer: CMBlockBuffer? + var sampleSizes: [Int] = [] + switch streamType { + case .h264, .h265: + ISOTypeBufferUtil.toNALFileFormat(&data) + blockBuffer = data.makeBlockBuffer(advancedBy: 0) + sampleSizes.append(blockBuffer?.dataLength ?? 0) + case .adtsAac: + blockBuffer = data.makeBlockBuffer(advancedBy: 0) + let reader = ADTSReader() + reader.read(data) + var iterator = reader.makeIterator() + while let next = iterator.next() { + sampleSizes.append(next) + } + default: + break + } + var sampleBuffer: CMSampleBuffer? + var timing = optionalPESHeader?.makeSampleTimingInfo(previousPresentationTimeStamp) ?? 
.invalid + guard let blockBuffer, CMSampleBufferCreate( + allocator: kCFAllocatorDefault, + dataBuffer: blockBuffer, + dataReady: true, + makeDataReadyCallback: nil, + refcon: nil, + formatDescription: formatDescription, + sampleCount: sampleSizes.count, + sampleTimingEntryCount: 1, + sampleTimingArray: &timing, + sampleSizeEntryCount: sampleSizes.count, + sampleSizeArray: &sampleSizes, + sampleBufferOut: &sampleBuffer) == noErr else { + return nil + } + return sampleBuffer + } +} + +extension PacketizedElementaryStream: CustomDebugStringConvertible { + // MARK: CustomDebugStringConvertible + var debugDescription: String { + Mirror(reflecting: self).debugDescription + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/TS/TSField.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/TS/TSField.swift new file mode 100644 index 000000000..966d65cac --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/TS/TSField.swift @@ -0,0 +1,202 @@ +import Foundation +import HaishinKit + +class TSAdaptationField { + static let PCRSize: Int = 6 + static let fixedSectionSize: Int = 2 + + var length: UInt8 = 0 + var discontinuityIndicator = false + var randomAccessIndicator = false + var elementaryStreamPriorityIndicator = false + var pcrFlag = false + var opcrFlag = false + var splicingPointFlag = false + var transportPrivateDataFlag = false + var adaptationFieldExtensionFlag = false + var pcr = Data() + var opcr = Data() + var spliceCountdown: UInt8 = 0 + var transportPrivateDataLength: UInt8 = 0 + var transportPrivateData = Data() + var adaptationExtension: TSAdaptationExtensionField? 
+ var stuffingBytes = Data() + + init() { + } + + init?(data: Data) { + self.data = data + } + + func compute() { + length = UInt8(truncatingIfNeeded: TSAdaptationField.fixedSectionSize) + length += UInt8(truncatingIfNeeded: pcr.count) + length += UInt8(truncatingIfNeeded: opcr.count) + length += UInt8(truncatingIfNeeded: transportPrivateData.count) + if let adaptationExtension { + length += adaptationExtension.length + 1 + } + length += UInt8(truncatingIfNeeded: stuffingBytes.count) + length -= 1 + } + + func stuffing(_ size: Int) { + stuffingBytes = Data(repeating: 0xff, count: size) + length += UInt8(size) + } +} + +extension TSAdaptationField: DataConvertible { + // MARK: DataConvertible + var data: Data { + get { + var byte: UInt8 = 0 + byte |= discontinuityIndicator ? 0x80 : 0 + byte |= randomAccessIndicator ? 0x40 : 0 + byte |= elementaryStreamPriorityIndicator ? 0x20 : 0 + byte |= pcrFlag ? 0x10 : 0 + byte |= opcrFlag ? 0x08 : 0 + byte |= splicingPointFlag ? 0x04 : 0 + byte |= transportPrivateDataFlag ? 0x02 : 0 + byte |= adaptationFieldExtensionFlag ? 
0x01 : 0 + let buffer = ByteArray() + .writeUInt8(length) + .writeUInt8(byte) + if pcrFlag { + buffer.writeBytes(pcr) + } + if opcrFlag { + buffer.writeBytes(opcr) + } + if splicingPointFlag { + buffer.writeUInt8(spliceCountdown) + } + if transportPrivateDataFlag { + buffer.writeUInt8(transportPrivateDataLength).writeBytes(transportPrivateData) + } + if adaptationFieldExtensionFlag { + buffer.writeBytes(adaptationExtension!.data) + } + return buffer.writeBytes(stuffingBytes).data + } + set { + let buffer = ByteArray(data: newValue) + do { + length = try buffer.readUInt8() + let byte: UInt8 = try buffer.readUInt8() + discontinuityIndicator = (byte & 0x80) == 0x80 + randomAccessIndicator = (byte & 0x40) == 0x40 + elementaryStreamPriorityIndicator = (byte & 0x20) == 0x20 + pcrFlag = (byte & 0x10) == 0x10 + opcrFlag = (byte & 0x08) == 0x08 + splicingPointFlag = (byte & 0x04) == 0x04 + transportPrivateDataFlag = (byte & 0x02) == 0x02 + adaptationFieldExtensionFlag = (byte & 0x01) == 0x01 + if pcrFlag { + pcr = try buffer.readBytes(TSAdaptationField.PCRSize) + } + if opcrFlag { + opcr = try buffer.readBytes(TSAdaptationField.PCRSize) + } + if splicingPointFlag { + spliceCountdown = try buffer.readUInt8() + } + if transportPrivateDataFlag { + transportPrivateDataLength = try buffer.readUInt8() + transportPrivateData = try buffer.readBytes(Int(transportPrivateDataLength)) + } + if adaptationFieldExtensionFlag { + let length = Int(try buffer.readUInt8()) + buffer.position -= 1 + adaptationExtension = TSAdaptationExtensionField(data: try buffer.readBytes(length + 1)) + } + stuffingBytes = try buffer.readBytes(buffer.bytesAvailable) + } catch { + logger.error("\(buffer)") + } + } + } +} + +extension TSAdaptationField: CustomDebugStringConvertible { + // MARK: CustomDebugStringConvertible + var debugDescription: String { + Mirror(reflecting: self).debugDescription + } +} + +struct TSAdaptationExtensionField { + var length: UInt8 = 0 + var legalTimeWindowFlag = false + var 
piecewiseRateFlag = false + var seamlessSpiceFlag = false + var legalTimeWindowOffset: UInt16 = 0 + var piecewiseRate: UInt32 = 0 + var spliceType: UInt8 = 0 + var DTSNextAccessUnit = Data(count: 5) + + init?(data: Data) { + self.data = data + } +} + +extension TSAdaptationExtensionField: DataConvertible { + // MARK: DataConvertible + var data: Data { + get { + let buffer = ByteArray() + .writeUInt8(length) + .writeUInt8( + (legalTimeWindowFlag ? 0x80 : 0) | + (piecewiseRateFlag ? 0x40 : 0) | + (seamlessSpiceFlag ? 0x1f : 0) + ) + if legalTimeWindowFlag { + buffer.writeUInt16((legalTimeWindowFlag ? 0x8000 : 0) | legalTimeWindowOffset) + } + if piecewiseRateFlag { + buffer.writeUInt24(piecewiseRate) + } + if seamlessSpiceFlag { + buffer + .writeUInt8(spliceType) + .writeUInt8(spliceType << 4 | DTSNextAccessUnit[0]) + .writeBytes(DTSNextAccessUnit.subdata(in: 1..> 4 + DTSNextAccessUnit[0] = DTSNextAccessUnit[0] & 0x0f + } + } catch { + logger.error("\(buffer)") + } + } + } +} + +extension TSAdaptationExtensionField: CustomDebugStringConvertible { + // MARK: CustomDebugStringConvertible + var debugDescription: String { + Mirror(reflecting: self).debugDescription + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/TS/TSPacket.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/TS/TSPacket.swift new file mode 100644 index 000000000..1def28860 --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/TS/TSPacket.swift @@ -0,0 +1,195 @@ +import AVFoundation +import HaishinKit + +/** + - seealso: https://en.wikipedia.org/wiki/MPEG_transport_stream#Packet + */ +struct TSPacket { + static let size: Int = 188 + static let headerSize: Int = 4 + static let defaultSyncByte: UInt8 = 0x47 + + var syncByte: UInt8 = TSPacket.defaultSyncByte + var transportErrorIndicator = false + var payloadUnitStartIndicator = false + var transportPriority = false + var pid: UInt16 = 0 + var scramblingControl: UInt8 = 0 + var adaptationFieldFlag = false + var payloadFlag = 
false + var continuityCounter: UInt8 = 0 + var adaptationField: TSAdaptationField? + var payload = Data() + + private var remain: Int { + var adaptationFieldSize = 0 + if let adaptationField, adaptationFieldFlag { + adaptationField.compute() + adaptationFieldSize = Int(adaptationField.length) + 1 + } + return TSPacket.size - TSPacket.headerSize - adaptationFieldSize - payload.count + } + + init() { + } + + init?(data: Data) { + guard TSPacket.size == data.count else { + return nil + } + self.data = data + if syncByte != TSPacket.defaultSyncByte { + return nil + } + } + + mutating func fill(_ data: Data?, useAdaptationField: Bool) -> Int { + guard let data else { + payload.append(Data(repeating: 0xff, count: remain)) + return 0 + } + payloadFlag = true + let length = min(data.count, remain, 183) + payload.append(data[0..> 8) + bytes[2] |= UInt8(pid & 0x00FF) + bytes[3] |= scramblingControl << 6 + bytes[3] |= adaptationFieldFlag ? 0x20 : 0 + bytes[3] |= payloadFlag ? 0x10 : 0 + bytes[3] |= continuityCounter + return ByteArray() + .writeBytes(bytes) + .writeBytes(adaptationFieldFlag ? adaptationField?.data ?? 
Data([0]) : Data()) + .writeBytes(payload) + .data + } + set { + let buffer = ByteArray(data: newValue) + do { + let data = try buffer.readBytes(4) + syncByte = data[0] + transportErrorIndicator = (data[1] & 0x80) == 0x80 + payloadUnitStartIndicator = (data[1] & 0x40) == 0x40 + transportPriority = (data[1] & 0x20) == 0x20 + pid = UInt16(data[1] & 0x1f) << 8 | UInt16(data[2]) + scramblingControl = UInt8(data[3] & 0xc0) + adaptationFieldFlag = (data[3] & 0x20) == 0x20 + payloadFlag = (data[3] & 0x10) == 0x10 + continuityCounter = UInt8(data[3] & 0xf) + if adaptationFieldFlag { + let length = Int(try buffer.readUInt8()) + if 0 < length { + buffer.position -= 1 + adaptationField = TSAdaptationField(data: try buffer.readBytes(length + 1)) + } + } + if payloadFlag { + payload = try buffer.readBytes(buffer.bytesAvailable) + } + } catch { + logger.error("\(buffer)") + } + } + } +} + +extension TSPacket: CustomDebugStringConvertible { + // MARK: CustomDebugStringConvertible + var debugDescription: String { + Mirror(reflecting: self).debugDescription + } +} + +// MARK: - +enum TSTimestamp { + static let resolution: Double = 90 * 1000 // 90kHz + static let dataSize: Int = 5 + static let ptsMask: UInt8 = 0x10 + static let ptsDtsMask: UInt8 = 0x30 + + static func decode(_ data: Data, offset: Int = 0) -> Int64 { + var result: Int64 = 0 + result |= Int64(data[offset + 0] & 0x0e) << 29 + result |= Int64(data[offset + 1]) << 22 | Int64(data[offset + 2] & 0xfe) << 14 + result |= Int64(data[offset + 3]) << 7 | Int64(data[offset + 3] & 0xfe) << 1 + return result + } + + static func encode(_ b: Int64, _ m: UInt8) -> Data { + var data = Data(count: dataSize) + data[0] = UInt8(truncatingIfNeeded: b >> 29) | 0x01 | m + data[1] = UInt8(truncatingIfNeeded: b >> 22) + data[2] = UInt8(truncatingIfNeeded: b >> 14) | 0x01 + data[3] = UInt8(truncatingIfNeeded: b >> 7) + data[4] = UInt8(truncatingIfNeeded: b << 1) | 0x01 + return data + } +} + +// MARK: - +enum TSProgramClockReference { + static 
let resolutionForBase: Int32 = 90 * 1000 // 90kHz + static let resolutionForExtension: Int32 = 27 * 1000 * 1000 // 27MHz + + static func decode(_ data: Data) -> (UInt64, UInt16) { + var b: UInt64 = 0 + var e: UInt16 = 0 + b |= UInt64(data[0]) << 25 + b |= UInt64(data[1]) << 17 + b |= UInt64(data[2]) << 9 + b |= UInt64(data[3]) << 1 + b |= ((data[4] & 0x80) == 0x80) ? 1 : 0 + e |= UInt16(data[4] & 0x01) << 8 + e |= UInt16(data[5]) + return (b, e) + } + + static func encode(_ b: UInt64, _ e: UInt16) -> Data { + var data = Data(count: 6) + data[0] = UInt8(truncatingIfNeeded: b >> 25) + data[1] = UInt8(truncatingIfNeeded: b >> 17) + data[2] = UInt8(truncatingIfNeeded: b >> 9) + data[3] = UInt8(truncatingIfNeeded: b >> 1) + data[4] = 0xff + if (b & 1) == 1 { + data[4] |= 0x80 + } else { + data[4] &= 0x7f + } + if UInt16(data[4] & 0x01) >> 8 == 1 { + data[4] |= 1 + } else { + data[4] &= 0xfe + } + data[5] = UInt8(truncatingIfNeeded: e) + return data + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/TS/TSProgram.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/TS/TSProgram.swift new file mode 100644 index 000000000..6dd4cec3c --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/TS/TSProgram.swift @@ -0,0 +1,212 @@ +import Foundation +import HaishinKit + +/** + - seealso: https://en.wikipedia.org/wiki/Program-specific_information + */ +protocol TSPSIPointer { + var pointerField: UInt8 { get set } + var pointerFillerBytes: Data { get set } +} + +// MARK: - +protocol TSPSITableHeader { + var tableId: UInt8 { get set } + var sectionSyntaxIndicator: Bool { get set } + var privateBit: Bool { get set } + var sectionLength: UInt16 { get set } +} + +// MARK: - +protocol TSPSITableSyntax { + var tableIdExtension: UInt16 { get set } + var versionNumber: UInt8 { get set } + var currentNextIndicator: Bool { get set } + var sectionNumber: UInt8 { get set } + var lastSectionNumber: UInt8 { get set } + var tableData: Data { get set } + var crc32: UInt32 
{ get set } +} + +// MARK: - +class TSProgram: TSPSIPointer, TSPSITableHeader, TSPSITableSyntax { + static let reservedBits: UInt8 = 0x03 + static let defaultTableIDExtension: UInt16 = 1 + + // MARK: PSIPointer + var pointerField: UInt8 = 0 + var pointerFillerBytes = Data() + + // MARK: PSITableHeader + var tableId: UInt8 = 0 + var sectionSyntaxIndicator = false + var privateBit = false + var sectionLength: UInt16 = 0 + + // MARK: PSITableSyntax + var tableIdExtension: UInt16 = TSProgram.defaultTableIDExtension + var versionNumber: UInt8 = 0 + var currentNextIndicator = true + var sectionNumber: UInt8 = 0 + var lastSectionNumber: UInt8 = 0 + var tableData: Data = .init() + var crc32: UInt32 = 0 + + init() { + } + + init?(_ data: Data) { + self.data = data + } + + func arrayOfPackets(_ PID: UInt16) -> [TSPacket] { + var packets: [TSPacket] = [] + var packet = TSPacket() + packet.payloadUnitStartIndicator = true + packet.pid = PID + _ = packet.fill(data, useAdaptationField: false) + packets.append(packet) + return packets + } +} + +extension TSProgram: DataConvertible { + var data: Data { + get { + let tableData: Data = self.tableData + sectionLength = UInt16(tableData.count) + 9 + sectionSyntaxIndicator = !tableData.isEmpty + let buffer = ByteArray() + .writeUInt8(tableId) + .writeUInt16( + (sectionSyntaxIndicator ? 0x8000 : 0) | + (privateBit ? 0x4000 : 0) | + UInt16(TSProgram.reservedBits) << 12 | + sectionLength + ) + .writeUInt16(tableIdExtension) + .writeUInt8( + TSProgram.reservedBits << 6 | + versionNumber << 1 | + (currentNextIndicator ? 
1 : 0) + ) + .writeUInt8(sectionNumber) + .writeUInt8(lastSectionNumber) + .writeBytes(tableData) + crc32 = CRC32.mpeg2.calculate(buffer.data) + return Data([pointerField] + pointerFillerBytes) + buffer.writeUInt32(crc32).data + } + set { + let buffer = ByteArray(data: newValue) + do { + pointerField = try buffer.readUInt8() + pointerFillerBytes = try buffer.readBytes(Int(pointerField)) + tableId = try buffer.readUInt8() + let bytes: Data = try buffer.readBytes(2) + sectionSyntaxIndicator = (bytes[0] & 0x80) == 0x80 + privateBit = (bytes[0] & 0x40) == 0x40 + sectionLength = UInt16(bytes[0] & 0x03) << 8 | UInt16(bytes[1]) + tableIdExtension = try buffer.readUInt16() + versionNumber = try buffer.readUInt8() + currentNextIndicator = (versionNumber & 0x01) == 0x01 + versionNumber = (versionNumber & 0b00111110) >> 1 + sectionNumber = try buffer.readUInt8() + lastSectionNumber = try buffer.readUInt8() + tableData = try buffer.readBytes(Int(sectionLength - 9)) + crc32 = try buffer.readUInt32() + } catch { + logger.error("\(buffer)") + } + } + } +} + +extension TSProgram: CustomDebugStringConvertible { + // MARK: CustomDebugStringConvertible + var debugDescription: String { + Mirror(reflecting: self).debugDescription + } +} + +// MARK: - +final class TSProgramAssociation: TSProgram { + static let tableID: UInt8 = 0 + + var programs: [UInt16: UInt16] = [:] + + override var tableData: Data { + get { + let buffer = ByteArray() + for (number, programMapPID) in programs { + buffer.writeUInt16(number).writeUInt16(programMapPID | 0xe000) + } + return buffer.data + } + set { + let buffer = ByteArray(data: newValue) + do { + for _ in 0.. 
Bool in + lhs.elementaryPID < rhs.elementaryPID + } + for essd in elementaryStreamSpecificData { + bytes.append(essd.data) + } + return ByteArray() + .writeUInt16(PCRPID | 0xe000) + .writeUInt16(programInfoLength | 0xf000) + .writeBytes(bytes) + .data + } + set { + let buffer = ByteArray(data: newValue) + do { + PCRPID = try buffer.readUInt16() & 0x1fff + programInfoLength = try buffer.readUInt16() & 0x03ff + buffer.position += Int(programInfoLength) + var position = 0 + while 0 < buffer.bytesAvailable { + position = buffer.position + guard let data = ESSpecificData(try buffer.readBytes(buffer.bytesAvailable)) else { + break + } + buffer.position = position + ESSpecificData.fixedHeaderSize + Int(data.esInfoLength) + elementaryStreamSpecificData.append(data) + } + } catch { + logger.error("\(buffer)") + } + } + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/TS/TSReader.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/TS/TSReader.swift new file mode 100644 index 000000000..c45e3c819 --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/TS/TSReader.swift @@ -0,0 +1,152 @@ +import AVFoundation +import Foundation +import HaishinKit + +/// A class represents that reads MPEG-2 transport stream data. +final class TSReader { + /// An asynchronous sequence for reading data. + public var output: AsyncStream<(UInt16, CMSampleBuffer)> { + AsyncStream { continuation in + self.continuation = continuation + } + } + private var pat: TSProgramAssociation? 
{ + didSet { + guard let pat else { + return + } + for (channel, PID) in pat.programs { + programs[PID] = channel + } + if logger.isEnabledFor(level: .trace) { + logger.trace(programs) + } + } + } + private var pmt: [UInt16: TSProgramMap] = [:] { + didSet { + for pmt in pmt.values { + for data in pmt.elementaryStreamSpecificData where esSpecData[data.elementaryPID] != data { + esSpecData[data.elementaryPID] = data + } + } + if logger.isEnabledFor(level: .trace) { + logger.trace(esSpecData) + } + } + } + private var programs: [UInt16: UInt16] = [:] + private var esSpecData: [UInt16: ESSpecificData] = [:] + private var continuation: AsyncStream<(UInt16, CMSampleBuffer)>.Continuation? { + didSet { + oldValue?.finish() + } + } + private var nalUnitReader = NALUnitReader() + private var formatDescriptions: [UInt16: CMFormatDescription] = [:] + private var packetizedElementaryStreams: [UInt16: PacketizedElementaryStream] = [:] + private var previousPresentationTimeStamps: [UInt16: CMTime] = [:] + + /// Create a new instance. + init() { + } + + /// Reads transport-stream data. + func read(_ data: Data) -> Int { + let count = data.count / TSPacket.size + for i in 0.. CMSampleBuffer? 
{ + guard + let data = esSpecData[id], + var pes = packetizedElementaryStreams[id], pes.isEntired || forUpdate else { + return nil + } + defer { + packetizedElementaryStreams[id] = nil + } + let formatDescription = makeFormatDescription(data, pes: &pes) + if let formatDescription, formatDescriptions[id] != formatDescription { + formatDescriptions[id] = formatDescription + } + var isNotSync = true + switch data.streamType { + case .h264: + let units = nalUnitReader.read(&pes.data, type: H264NALUnit.self) + if let unit = units.first(where: { $0.type == .idr || $0.type == .slice }) { + var data = Data([0x00, 0x00, 0x00, 0x01]) + data.append(unit.data) + pes.data = data + } + isNotSync = !units.contains { $0.type == .idr } + case .h265: + let units = nalUnitReader.read(&pes.data, type: HEVCNALUnit.self) + isNotSync = units.contains { $0.type == .sps } + case .adtsAac: + isNotSync = false + default: + break + } + let sampleBuffer = pes.makeSampleBuffer( + data.streamType, + previousPresentationTimeStamp: previousPresentationTimeStamps[id] ?? .invalid, + formatDescription: formatDescriptions[id] + ) + sampleBuffer?.isNotSync = isNotSync + previousPresentationTimeStamps[id] = sampleBuffer?.presentationTimeStamp + return sampleBuffer + } + + private func makeFormatDescription(_ data: ESSpecificData, pes: inout PacketizedElementaryStream) -> CMFormatDescription? 
{ + switch data.streamType { + case .adtsAac: + + return ADTSHeader(data: pes.data).makeFormatDescription() + case .h264, .h265: + return nalUnitReader.makeFormatDescription(&pes.data, type: data.streamType) + default: + return nil + } + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/TS/TSWriter.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/TS/TSWriter.swift new file mode 100644 index 000000000..d6214d4ff --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/TS/TSWriter.swift @@ -0,0 +1,230 @@ +import AVFoundation +import CoreMedia +import Foundation + +/// An object that represents writes MPEG-2 transport stream data. +final class TSWriter { + static let defaultPATPID: UInt16 = 0 + static let defaultPMTPID: UInt16 = 4095 + static let defaultVideoPID: UInt16 = 256 + static let defaultAudioPID: UInt16 = 257 + static let defaultSegmentDuration: Double = 2 + /// An asynchronous sequence for writing data. + public var output: AsyncStream { + AsyncStream { continuation in + self.continuation = continuation + } + } + /// Specifies the exptected medias = [.video, .audio]. + var expectedMedias: Set = [] + /// Specifies the audio format. + var audioFormat: AVAudioFormat? { + didSet { + guard let audioFormat, audioFormat != oldValue else { + return + } + var data = ESSpecificData() + data.streamType = audioFormat.formatDescription.streamType + data.elementaryPID = Self.defaultAudioPID + pmt.elementaryStreamSpecificData.append(data) + audioContinuityCounter = 0 + writeProgramIfNeeded() + } + } + /// Specifies the video format. + var videoFormat: CMFormatDescription? 
{ + didSet { + guard let videoFormat, videoFormat != oldValue else { + return + } + var data = ESSpecificData() + data.streamType = videoFormat.streamType + data.elementaryPID = Self.defaultVideoPID + pmt.elementaryStreamSpecificData.append(data) + videoContinuityCounter = 0 + writeProgramIfNeeded() + } + } + + private(set) var pat: TSProgramAssociation = { + let PAT: TSProgramAssociation = .init() + PAT.programs = [1: TSWriter.defaultPMTPID] + return PAT + }() + private(set) var pmt: TSProgramMap = .init() + private var pcrPID: UInt16 = TSWriter.defaultVideoPID + private var canWriteFor: Bool { + guard !expectedMedias.isEmpty else { + return true + } + if expectedMedias.contains(.audio) && expectedMedias.contains(.video) { + return audioFormat != nil && videoFormat != nil + } + if expectedMedias.contains(.video) { + return videoFormat != nil + } + if expectedMedias.contains(.audio) { + return audioFormat != nil + } + return false + } + private var videoTimeStamp: CMTime = .invalid + private var audioTimeStamp: CMTime = .invalid + private var clockTimeStamp: CMTime = .zero + private var segmentDuration: Double = TSWriter.defaultSegmentDuration + private var rotatedTimeStamp: CMTime = .zero + private var audioContinuityCounter: UInt8 = 0 + private var videoContinuityCounter: UInt8 = 0 + private var continuation: AsyncStream.Continuation? { + didSet { + oldValue?.finish() + } + } + + /// Creates a new instance with segument duration. + init(segmentDuration: Double = 2.0) { + self.segmentDuration = segmentDuration + } + + /// Appends a buffer. + func append(_ audioBuffer: AVAudioBuffer, when: AVAudioTime) { + guard let audioBuffer = audioBuffer as? 
AVAudioCompressedBuffer, canWriteFor else { + return + } + if audioTimeStamp == .invalid { + audioTimeStamp = when.makeTime() + if pcrPID == TSWriter.defaultAudioPID { + clockTimeStamp = audioTimeStamp + } + } + if var pes = PacketizedElementaryStream(audioBuffer, when: when, timeStamp: audioTimeStamp) { + pes.streamID = 192 + writePacketizedElementaryStream( + TSWriter.defaultAudioPID, + PES: pes, + timeStamp: when.makeTime(), + randomAccessIndicator: true + ) + } + } + + /// Appends a buffer. + func append(_ sampleBuffer: CMSampleBuffer) { + guard canWriteFor else { + return + } + switch sampleBuffer.formatDescription?.mediaType { + case .video: + if videoTimeStamp == .invalid { + videoTimeStamp = sampleBuffer.presentationTimeStamp + if pcrPID == Self.defaultVideoPID { + clockTimeStamp = videoTimeStamp + } + } + if var pes = PacketizedElementaryStream(sampleBuffer, timeStamp: videoTimeStamp) { + let timestamp = sampleBuffer.decodeTimeStamp == .invalid ? + sampleBuffer.presentationTimeStamp : sampleBuffer.decodeTimeStamp + pes.streamID = 224 + writePacketizedElementaryStream( + Self.defaultVideoPID, + PES: pes, + timeStamp: timestamp, + randomAccessIndicator: !sampleBuffer.isNotSync + ) + } + default: + break + } + } + + /// Clears the writer object for new transport stream. 
+ func clear() { + audioFormat = nil + audioContinuityCounter = 0 + videoFormat = nil + videoContinuityCounter = 0 + pcrPID = Self.defaultVideoPID + pat.programs.removeAll() + pat.programs = [1: Self.defaultPMTPID] + pmt = TSProgramMap() + videoTimeStamp = .invalid + audioTimeStamp = .invalid + clockTimeStamp = .zero + rotatedTimeStamp = .zero + expectedMedias.removeAll() + continuation = nil + } + + private func writePacketizedElementaryStream(_ PID: UInt16, PES: PacketizedElementaryStream, timeStamp: CMTime, randomAccessIndicator: Bool) { + let packets: [TSPacket] = split(PID, PES: PES, timestamp: timeStamp) + rotateFileHandle(timeStamp) + + packets[0].adaptationField?.randomAccessIndicator = randomAccessIndicator + + var bytes = Data() + for var packet in packets { + switch PID { + case Self.defaultAudioPID: + packet.continuityCounter = audioContinuityCounter + audioContinuityCounter = (audioContinuityCounter + 1) & 0x0f + case Self.defaultVideoPID: + packet.continuityCounter = videoContinuityCounter + videoContinuityCounter = (videoContinuityCounter + 1) & 0x0f + default: + break + } + bytes.append(packet.data) + } + + write(bytes) + } + + private func rotateFileHandle(_ timestamp: CMTime) { + let duration = timestamp.seconds - rotatedTimeStamp.seconds + guard segmentDuration < duration else { + return + } + writeProgramIfNeeded() + rotatedTimeStamp = timestamp + } + + private func write(_ data: Data) { + continuation?.yield(data) + } + + private func writeProgram() { + pmt.PCRPID = pcrPID + var bytes = Data() + var packets: [TSPacket] = [] + packets.append(contentsOf: pat.arrayOfPackets(Self.defaultPATPID)) + packets.append(contentsOf: pmt.arrayOfPackets(Self.defaultPMTPID)) + for packet in packets { + bytes.append(packet.data) + } + write(bytes) + } + + private func writeProgramIfNeeded() { + guard !expectedMedias.isEmpty else { + return + } + guard canWriteFor else { + return + } + writeProgram() + } + + private func split(_ PID: UInt16, PES: 
PacketizedElementaryStream, timestamp: CMTime) -> [TSPacket] { + var PCR: UInt64? + let duration: Double = timestamp.seconds - clockTimeStamp.seconds + if pcrPID == PID && 0.02 <= duration { + PCR = UInt64((timestamp.seconds - (PID == Self.defaultVideoPID ? videoTimeStamp : audioTimeStamp).seconds) * TSTimestamp.resolution) + clockTimeStamp = timestamp + } + var packets: [TSPacket] = [] + for packet in PES.arrayOfPackets(PID, PCR: PCR) { + packets.append(packet) + } + return packets + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/Util/Constants.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/Util/Constants.swift new file mode 100644 index 000000000..f733752b7 --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Sources/Util/Constants.swift @@ -0,0 +1,4 @@ +import Logboard + +public let kSRTHaishinKitIdentifier = "com.haishinkit.SRTHaishinKit" +nonisolated(unsafe) let logger = LBLogger.with(kSRTHaishinKitIdentifier) diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/Codec/ADTSReaderTests.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/Codec/ADTSReaderTests.swift new file mode 100644 index 000000000..42eff9ad8 --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/Codec/ADTSReaderTests.swift @@ -0,0 +1,19 @@ +import Foundation +import Testing + +@testable import SRTHaishinKit + +@Suite struct ADTSReaderTests { + @Test func bytes() { + let data = Data([255, 241, 76, 128, 46, 127, 252, 33, 26, 12, 132, 255, 255, 203, 252, 189, 165, 71, 2, 151, 152, 212, 38, 103, 11, 157, 74, 227, 156, 227, 122, 147, 92, 235, 60, 248, 210, 165, 245, 226, 83, 113, 197, 105, 159, 143, 159, 247, 13, 202, 225, 255, 58, 174, 222, 223, 155, 237, 93, 241, 248, 238, 245, 235, 143, 49, 114, 54, 75, 71, 40, 207, 134, 83, 144, 96, 193, 220, 78, 38, 18, 241, 85, 105, 198, 27, 175, 14, 7, 247, 174, 225, 163, 20, 84, 163, 26, 152, 244, 127, 82, 230, 230, 180, 170, 72, 189, 111, 112, 252, 159, 110, 68, 203, 5, 86, 85, 219, 251, 7, 54, 162, 43, 100, 84, 
105, 198, 88, 101, 161, 224, 123, 249, 195, 11, 42, 42, 229, 143, 123, 213, 236, 202, 64, 41, 171, 196, 207, 105, 0, 41, 92, 169, 196, 14, 79, 195, 10, 188, 246, 128, 175, 236, 121, 43, 178, 69, 205, 87, 119, 229, 238, 212, 197, 245, 167, 32, 116, 127, 175, 230, 196, 91, 176, 243, 191, 82, 5, 126, 138, 15, 253, 63, 244, 159, 139, 254, 64, 75, 46, 174, 198, 24, 103, 255, 79, 175, 255, 246, 32, 9, 123, 28, 33, 210, 167, 54, 187, 213, 201, 89, 124, 86, 77, 115, 172, 243, 227, 74, 151, 215, 137, 75, 157, 231, 90, 103, 227, 231, 253, 192, 245, 254, 85, 11, 15, 211, 181, 134, 203, 31, 249, 111, 234, 255, 78, 183, 190, 174, 67, 145, 38, 10, 235, 46, 127, 140, 170, 86, 239, 218, 239, 49, 189, 104, 45, 121, 21, 196, 250, 30, 130, 25, 100, 22, 187, 139, 245, 170, 16, 106, 87, 73, 174, 255, 129, 5, 146, 57, 215, 47, 110, 230, 90, 204, 86, 46, 226, 177, 253, 125, 153, 157, 197, 10, 34, 190, 94, 29, 181, 109, 162, 67, 155, 196, 15, 202, 182, 166, 62, 25, 167, 253, 46, 57, 194, 91, 206, 94, 207, 217, 246, 192, 234, 188, 176, 30, 113, 48, 50, 237, 6, 164, 58, 80, 45, 110, 78, 16, 42, 30, 191, 185, 235, 123, 190, 232, 2, 94, 184, 56, 255, 241, 76, 128, 46, 63, 252, 33, 26, 14, 239, 253, 251, 235, 252, 205, 161, 137, 69, 148, 171, 100, 45, 234, 229, 243, 47, 158, 169, 114, 115, 191, 191, 62, 123, 219, 219, 116, 243, 187, 231, 126, 205, 102, 188, 254, 161, 22, 254, 181, 146, 122, 25, 251, 25, 204, 253, 193, 146, 224, 107, 133, 41, 151, 66, 78, 168, 13, 30, 18, 80, 53, 186, 39, 149, 169, 222, 1, 64, 5, 138, 141, 84, 213, 226, 202, 72, 128, 26, 182, 177, 204, 128, 172, 216, 199, 18, 103, 184, 254, 43, 41, 208, 173, 242, 153, 85, 229, 135, 156, 247, 173, 180, 84, 8, 43, 126, 134, 240, 77, 196, 86, 56, 106, 118, 216, 247, 61, 60, 192, 85, 49, 143, 175, 92, 44, 36, 185, 189, 124, 123, 179, 105, 0, 199, 195, 135, 103, 108, 0, 92, 67, 183, 134, 166, 102, 208, 65, 206, 234, 58, 165, 10, 4, 185, 215, 82, 0, 10, 235, 164, 78, 208, 9, 43, 229, 18, 0, 110, 47, 207, 57, 0, 190, 124, 46, 35, 91, 
129, 0, 149, 227, 32, 26, 49, 194, 235, 0, 121, 115, 231, 231, 250, 120, 115, 135, 230, 224, 254, 77, 255, 254, 223, 242, 157, 106, 217, 32, 38, 105, 46, 133, 73, 185, 187, 212, 188, 171, 231, 170, 92, 156, 245, 124, 249, 239, 111, 215, 222, 158, 119, 205, 221, 181, 154, 243, 250, 136, 160, 109, 1, 124, 38, 201, 163, 192, 225, 141, 58, 129, 79, 151, 10, 176, 84, 198, 12, 53, 113, 145, 113, 220, 232, 220, 104, 46, 117, 183, 114, 48, 30, 55, 131, 253, 121, 98, 119, 117, 49, 87, 48, 244, 112, 198, 99, 144, 66, 85, 184, 254, 178, 0, 40, 193, 153, 141, 221, 124, 151, 215, 188, 215, 177, 118, 208, 61, 110, 158, 152, 126, 222, 36, 7, 153, 249, 30, 96, 28, 82, 128, 79, 211, 104, 5, 163, 182, 161, 167, 229, 62, 221, 75, 238, 141, 120, 130, 82, 28, 41, 246, 90, 247, 168, 35, 72, 65, 119, 111, 163, 12, 110, 253, 117, 7, 154, 255, 89, 132, 28, 255, 241, 76, 128, 43, 159, 252, 33, 26, 8, 247, 252, 255, 63, 252, 221, 38, 90, 131, 144, 171, 149, 58, 171, 174, 252, 153, 115, 158, 177, 197, 20, 248, 239, 190, 57, 243, 82, 186, 166, 159, 28, 129, 253, 231, 245, 206, 161, 249, 212, 208, 58, 145, 169, 176, 66, 174, 67, 169, 106, 4, 229, 85, 171, 135, 24, 199, 58, 158, 95, 234, 253, 62, 123, 114, 133, 86, 59, 56, 190, 47, 67, 15, 78, 251, 239, 196, 96, 154, 92, 97, 191, 27, 205, 203, 172, 21, 23, 73, 94, 131, 86, 58, 47, 42, 154, 128, 213, 93, 25, 97, 26, 193, 16, 63, 176, 198, 167, 109, 94, 167, 182, 73, 160, 14, 251, 138, 18, 21, 183, 199, 121, 137, 0, 53, 124, 39, 101, 77, 33, 115, 58, 34, 236, 90, 209, 110, 88, 221, 226, 75, 36, 86, 167, 113, 148, 83, 64, 142, 49, 19, 139, 2, 239, 85, 189, 112, 5, 130, 235, 183, 142, 51, 118, 160, 82, 163, 117, 187, 65, 197, 71, 30, 141, 107, 187, 132, 139, 139, 62, 140, 68, 104, 74, 245, 238, 220, 70, 127, 111, 15, 247, 215, 183, 221, 61, 85, 235, 142, 96, 4, 213, 161, 228, 110, 102, 181, 87, 89, 163, 46, 115, 214, 122, 227, 201, 79, 142, 251, 175, 50, 165, 117, 77, 62, 57, 10, 115, 184, 63, 5, 10, 204, 112, 39, 11, 90, 243, 140, 135, 122, 66, 54, 
150, 230, 198, 205, 216, 241, 40, 84, 188, 61, 166, 98, 243, 168, 102, 226, 120, 22, 1, 85, 24, 223, 173, 214, 205, 65, 17, 65, 49, 209, 199, 188, 0, 153, 10, 145, 5, 178, 45, 203, 247, 220, 41, 50, 32, 58, 208, 59, 219, 36, 98, 174, 247, 231, 0, 211, 1, 106, 2, 232, 1, 187, 116, 174, 9, 70, 14, 202, 170, 33, 161, 126, 137, 113, 21, 200, 216, 211, 215, 165, 225, 145, 55, 14, 106, 255, 234, 238, 83, 254, 147, 221, 19, 1, 192, 255, 241, 76, 128, 36, 127, 252, 33, 26, 8, 121, 191, 255, 253, 252, 229, 37, 208, 183, 119, 237, 82, 235, 51, 137, 85, 117, 172, 184, 14, 57, 151, 87, 82, 174, 80, 192, 116, 142, 120, 77, 106, 188, 248, 9, 238, 1, 0, 80, 221, 211, 94, 8, 145, 178, 226, 112, 182, 28, 126, 143, 62, 117, 68, 35, 20, 43, 186, 239, 253, 107, 77, 86, 230, 26, 211, 171, 179, 213, 132, 68, 213, 43, 43, 220, 252, 226, 210, 169, 42, 75, 197, 6, 163, 85, 49, 85, 85, 233, 244, 103, 98, 40, 76, 179, 212, 173, 17, 84, 206, 245, 113, 221, 218, 138, 25, 1, 243, 48, 173, 238, 245, 91, 217, 17, 220, 189, 210, 102, 195, 107, 74, 68, 196, 232, 227, 59, 153, 111, 107, 19, 83, 81, 209, 114, 147, 25, 128, 198, 234, 139, 169, 11, 170, 98, 150, 9, 156, 203, 6, 235, 83, 48, 134, 105, 66, 169, 80, 32, 166, 202, 184, 146, 211, 65, 166, 113, 85, 92, 138, 0, 39, 236, 107, 19, 48, 185, 159, 207, 165, 214, 77, 85, 93, 86, 178, 224, 56, 230, 93, 93, 74, 210, 134, 3, 104, 220, 224, 118, 234, 36, 199, 17, 238, 214, 115, 95, 68, 127, 47, 106, 183, 53, 16, 21, 215, 143, 104, 216, 162, 98, 107, 143, 209, 223, 32, 93, 42, 94, 190, 164, 9, 242, 59, 15, 167, 146, 65, 62, 89, 57, 23, 253, 12, 138, 20, 137, 97, 142, 37, 106, 183, 74, 238, 73, 111, 202, 88, 64, 68, 14, 21, 152, 17, 234, 247, 30, 189, 223, 128, 0, 7, 3, 125, 105, 94, 124, 0, 0, 0, 112, 255, 241, 76, 128, 39, 127, 252, 33, 26, 9, 175, 222, 223, 251, 252, 157, 161, 216, 172, 35, 152, 85, 239, 199, 124, 85, 231, 28, 245, 186, 185, 80, 137, 38, 235, 89, 198, 113, 227, 137, 55, 24, 27, 147, 162, 200, 160, 61, 107, 32, 167, 41, 218, 221, 94, 
194, 15, 97, 1, 237, 63, 105, 84, 184, 198, 203, 156, 111, 226, 115, 202, 51, 187, 86, 56, 198, 134, 238, 87, 231, 121, 229, 57, 170, 25, 207, 19, 110, 93, 207, 15, 102, 11, 155, 217, 221, 237, 189, 247, 107, 161, 215, 108, 14, 125, 221, 81, 151, 62, 157, 109, 168, 187, 142, 88, 43, 60, 84, 81, 206, 33, 101, 106, 214, 154, 119, 205, 201, 89, 232, 3, 64, 139, 196, 170, 235, 82, 221, 214, 34, 154, 205, 148, 153, 178, 8, 199, 12, 92, 74, 110, 102, 233, 185, 247, 239, 113, 245, 216, 149, 162, 101, 9, 224, 39, 44, 101, 75, 183, 23, 24, 135, 0, 101, 194, 246, 168, 185, 243, 81, 83, 213, 6, 112, 91, 243, 3, 101, 196, 168, 114, 229, 143, 127, 151, 251, 228, 106, 19, 116, 149, 89, 209, 187, 227, 215, 152, 206, 57, 235, 119, 82, 167, 125, 76, 212, 155, 173, 103, 25, 198, 228, 155, 140, 8, 102, 159, 50, 178, 9, 21, 171, 81, 136, 32, 206, 82, 226, 142, 168, 23, 72, 97, 153, 149, 199, 250, 248, 65, 72, 173, 218, 248, 157, 125, 101, 1, 21, 186, 191, 248, 164, 106, 244, 173, 25, 123, 213, 141, 84, 250, 16, 120, 140, 213, 24, 196, 106, 208, 129, 150, 198, 97, 68, 212, 160, 42, 168, 32, 133, 95, 138, 255, 65, 176, 236, 19, 205, 175, 150, 224, 34, 0, 14, 191, 249, 6, 0, 159, 169, 128, 91, 24, 0, 56, 255, 241, 76, 128, 38, 127, 252, 33, 26, 15, 191, 255, 95, 247, 252, 149, 134, 142, 181, 33, 26, 95, 53, 213, 93, 110, 107, 53, 149, 124, 233, 165, 86, 183, 170, 188, 187, 110, 56, 149, 87, 216, 156, 79, 202, 243, 241, 49, 111, 67, 60, 165, 167, 176, 12, 98, 22, 235, 197, 230, 243, 90, 200, 231, 26, 124, 227, 244, 240, 43, 124, 251, 72, 137, 207, 76, 127, 251, 176, 84, 86, 101, 231, 252, 63, 70, 191, 93, 254, 114, 170, 204, 220, 251, 254, 102, 190, 26, 206, 209, 113, 21, 223, 194, 133, 208, 179, 9, 177, 150, 90, 178, 113, 145, 21, 88, 171, 199, 126, 243, 142, 249, 9, 107, 107, 156, 235, 129, 66, 204, 229, 143, 65, 51, 6, 255, 105, 135, 198, 171, 143, 159, 253, 140, 140, 36, 255, 228, 207, 188, 58, 214, 26, 246, 240, 33, 91, 221, 85, 118, 225, 237, 97, 53, 118, 184, 245, 187, 127, 81, 
146, 213, 237, 33, 120, 117, 187, 236, 8, 233, 73, 177, 130, 166, 14, 167, 167, 249, 0, 18, 80, 149, 88, 209, 243, 93, 85, 214, 230, 179, 89, 87, 206, 154, 231, 91, 235, 122, 171, 203, 180, 219, 137, 85, 125, 131, 239, 219, 49, 139, 135, 188, 86, 142, 251, 46, 146, 12, 22, 36, 194, 53, 51, 89, 169, 124, 123, 63, 204, 14, 70, 131, 189, 199, 191, 69, 100, 38, 125, 209, 123, 231, 51, 146, 150, 235, 161, 156, 231, 27, 130, 22, 130, 56, 139, 254, 98, 17, 163, 1, 75, 213, 253, 201, 26, 255, 91, 179, 28, 24, 16, 194, 66, 20, 209, 91, 228, 159, 131, 10, 17, 55, 249, 112, 84, 80, 2, 160, 86, 249, 160, 176, 34, 1, 196, 0, 14, 255, 241, 76, 128, 45, 31, 252, 33, 26, 8, 253, 223, 127, 253, 252, 181, 162, 10, 66, 130, 170, 80, 98, 18, 41, 137, 215, 219, 245, 205, 231, 158, 201, 94, 186, 227, 43, 88, 189, 100, 69, 112, 222, 113, 169, 145, 128, 51, 60, 189, 53, 134, 250, 171, 120, 178, 58, 170, 188, 63, 75, 179, 16, 245, 160, 95, 91, 215, 94, 42, 250, 19, 214, 119, 97, 72, 46, 184, 209, 78, 48, 210, 209, 233, 207, 168, 67, 137, 10, 35, 114, 148, 115, 40, 42, 55, 42, 27, 233, 133, 232, 0, 214, 120, 81, 195, 250, 73, 199, 247, 70, 59, 249, 76, 234, 186, 125, 25, 27, 205, 220, 174, 239, 26, 141, 195, 143, 124, 235, 68, 86, 121, 5, 83, 57, 195, 121, 231, 209, 55, 137, 138, 139, 41, 62, 143, 101, 36, 74, 29, 239, 195, 230, 24, 20, 112, 4, 21, 68, 195, 32, 19, 157, 246, 57, 27, 212, 123, 184, 181, 48, 15, 95, 94, 49, 88, 44, 42, 151, 9, 195, 60, 242, 76, 207, 83, 131, 196, 88, 136, 122, 24, 99, 4, 231, 237, 208, 52, 78, 64, 20, 151, 77, 248, 247, 179, 230, 2, 74, 208, 187, 18, 188, 117, 198, 111, 60, 246, 74, 155, 227, 43, 85, 87, 172, 136, 174, 27, 205, 113, 50, 48, 12, 219, 243, 38, 63, 212, 127, 249, 20, 122, 126, 71, 247, 246, 133, 206, 9, 186, 93, 12, 109, 145, 21, 31, 41, 174, 141, 126, 19, 45, 52, 157, 71, 9, 225, 57, 255, 60, 2, 20, 238, 235, 156, 227, 229, 25, 2, 171, 187, 216, 168, 196, 69, 8, 156, 94, 172, 170, 128, 17, 212, 93, 214, 224, 146, 243, 154, 9, 133, 9, 231, 65, 
64, 165, 199, 176, 38, 226, 3, 29, 25, 81, 21, 50, 90, 91, 35, 57, 144, 92, 232, 0, 26, 237, 21, 25, 1, 85, 147, 177, 86, 13, 232, 109, 41, 90, 25, 92, 228, 60, 152, 255, 47, 122, 217, 45, 253, 92, 114, 188, 88, 16, 164, 149, 39, 112, 1, 192, 255, 241, 76, 128, 46, 223, 252, 33, 26, 11, 58, 194, 255, 255, 220, 141, 181, 71, 65, 98, 172, 155, 227, 221, 237, 85, 190, 187, 156, 120, 173, 71, 62, 114, 229, 85, 235, 154, 213, 179, 90, 189, 253, 253, 246, 32, 124, 117, 187, 130, 253, 80, 4, 156, 168, 108, 101, 192, 150, 234, 66, 72, 107, 163, 169, 64, 138, 80, 192, 16, 14, 70, 229, 128, 35, 25, 114, 87, 117, 95, 121, 68, 33, 216, 47, 207, 236, 129, 124, 105, 65, 65, 153, 204, 226, 0, 90, 249, 141, 184, 118, 49, 129, 48, 161, 69, 215, 74, 102, 188, 117, 107, 213, 238, 188, 230, 114, 170, 224, 130, 98, 119, 215, 21, 52, 215, 44, 197, 35, 61, 222, 218, 165, 49, 139, 140, 110, 167, 159, 88, 5, 148, 234, 231, 83, 32, 23, 228, 153, 2, 106, 204, 97, 149, 226, 196, 166, 56, 186, 106, 136, 64, 139, 189, 97, 122, 36, 29, 220, 245, 80, 196, 0, 190, 190, 12, 168, 5, 85, 239, 121, 9, 85, 61, 248, 171, 186, 65, 137, 42, 186, 226, 50, 58, 125, 159, 119, 15, 195, 193, 143, 131, 171, 194, 229, 204, 42, 132, 137, 190, 53, 61, 20, 128, 2, 50, 214, 65, 162, 169, 78, 79, 126, 57, 206, 170, 179, 142, 61, 248, 162, 249, 213, 101, 202, 171, 215, 53, 171, 102, 181, 123, 182, 195, 158, 117, 131, 14, 85, 226, 158, 237, 82, 125, 219, 65, 1, 88, 145, 38, 27, 15, 158, 18, 36, 3, 52, 76, 204, 73, 161, 111, 56, 139, 233, 77, 41, 79, 118, 53, 64, 228, 69, 38, 105, 85, 12, 190, 51, 48, 246, 44, 204, 92, 87, 122, 157, 127, 105, 30, 129, 155, 178, 177, 142, 114, 162, 52, 90, 2, 201, 97, 117, 155, 136, 58, 158, 228, 81, 200, 94, 133, 44, 105, 88, 177, 231, 156, 0, 95, 130, 181, 224, 154, 208, 7, 78, 238, 79, 118, 167, 78, 112, 126, 248, 53, 251, 187, 88, 212, 116, 205, 176, 150, 167, 59, 94, 148, 149, 224, 190, 45, 43, 115, 182, 85, 170, 88, 169, 124, 210, 53, 128, 56]) + + let reader = ADTSReader() + 
reader.read(data) + var iterator = reader.makeIterator() + var ranges: [Int] = [] + while let range = iterator.next() { + ranges.append(range) + } + #expect(ranges == [371, 369, 348, 291, 315, 307, 360, 374]) + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/Extension/Data+Extension.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/Extension/Data+Extension.swift new file mode 100644 index 000000000..6ffb49402 --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/Extension/Data+Extension.swift @@ -0,0 +1,12 @@ +import Foundation + +extension Data { + var bytes: [UInt8] { + withUnsafeBytes { + guard let pointer = $0.baseAddress?.assumingMemoryBound(to: UInt8.self) else { + return [] + } + return [UInt8](UnsafeBufferPointer(start: pointer, count: count)) + } + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/Extension/NALUnitReader+ExtensionTests.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/Extension/NALUnitReader+ExtensionTests.swift new file mode 100644 index 000000000..7f6a782e2 --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/Extension/NALUnitReader+ExtensionTests.swift @@ -0,0 +1,31 @@ +import CoreMedia +import Foundation +import Testing + +@testable import HaishinKit +@testable import SRTHaishinKit + +@Suite struct NALUnitReaderTests { + @Test func main() { + var data = Data([0, 0, 0, 1, 9, 240, 0, 0, 0, 1, 103, 77, 64, 13, 218, 5, 7, 236, 4, 64, 0, 0, 3, 0, 64, 0, 0, 7, 131, 197, 10, 168, 0, 0, 0, 1, 104, 239, 60, 128, 0, 0, 0, 1, 101, 136, 130, 1, 15, 250, 120, 30, 255, 244, 55, 157, 215, 115, 255, 239, 112, 39, 83, 211, 17, 103, 152, 229, 241, 131, 49, 7, 123, 10, 145, 184, 0, 0, 3, 3, 133, 122, 49, 20, 214, 115, 51, 202, 59, 43, 204, 79, 27, 229, 101, 135, 60, 234, 243, 78, 210, 98, 30, 252, 36, 38, 20, 202, 41, 121, 70, 45, 15, 54, 125, 153, 199, 236, 90, 142, 247, 27, 202, 17, 205, 77, 133, 21, 189, 212, 159, 87, 222, 100, 53, 75, 211, 139, 219, 83, 89, 59, 199, 242, 182, 18, 245, 72, 70, 50, 
230, 58, 82, 122, 179, 121, 243, 232, 107, 206, 157, 13, 151, 218, 93, 118, 157, 216, 67, 142, 2, 95, 69, 134, 167, 106, 101, 67, 112, 72, 120, 144, 105, 148, 234, 94, 74, 154, 149, 190, 13, 10, 88, 148, 169, 56, 46, 152, 176, 173, 110, 22, 215, 35, 18, 203, 125, 158, 16, 25, 228, 163, 26, 63, 30, 3, 96, 123, 237, 109, 12, 174, 216, 184, 25, 33, 123, 175, 69, 154, 240, 37, 168, 99, 38, 144, 221, 227, 119, 206, 215, 149, 111, 250, 180, 134, 78, 85, 50, 129, 178, 93, 255, 227, 144, 100, 156, 113, 113, 235, 47, 242, 68, 236, 109, 135, 87, 84, 178, 184, 163, 161, 170, 184, 84, 68, 113, 213, 73, 180, 25, 1, 77, 13, 222, 138, 69, 24, 104, 255, 218, 76, 224, 26, 122, 0, 231, 230, 203, 211, 172, 224, 26, 184, 69, 180, 123, 221, 8, 182, 241, 202, 193, 169, 120, 208, 135, 31, 82, 168, 125, 93, 207, 207, 109, 14, 243, 179, 97, 102, 58, 243, 14, 152, 13, 231, 30, 221, 177, 9, 72, 68, 212, 196, 71, 223, 142, 0, 248, 116, 139, 133, 210, 142, 83, 112, 87, 53, 138, 103, 202, 169, 112, 27, 7, 213, 152, 144, 207, 141, 84, 183, 121, 30, 128, 64, 95, 28, 10, 88, 116, 188, 83, 127, 181, 57, 47, 5, 19, 62, 132, 173, 201, 203, 170, 68, 224, 135, 134, 58, 206, 71, 77, 98, 77, 150, 225, 111, 103, 65, 84, 29, 176, 97, 72, 182, 151, 220, 153, 39, 247, 78, 136, 9, 166, 140, 221, 243, 68, 139, 229, 236, 189, 181, 124, 7, 35, 230, 139, 247, 223, 16, 78, 15, 189, 12, 144, 241, 169, 170, 166, 232, 17, 221, 212, 71, 69, 95, 122, 9, 36, 153, 246, 136, 111, 36, 50, 56, 118, 181, 240, 100, 5, 137, 252, 23, 244, 131, 41, 190, 128, 198, 134, 232, 40, 242, 214, 82, 69, 9, 168, 59, 179, 254, 220, 234, 16, 1, 170, 182, 214, 131, 169, 124, 91, 19, 65, 162, 179, 8, 98, 204, 219, 240, 6, 79, 49, 67, 120, 31, 236, 103, 167, 108, 213, 69, 193, 226, 66, 66, 242, 52, 18, 161, 42, 164, 133, 191, 82, 156, 2, 204, 75, 254, 217, 111, 215, 140, 157, 195, 195, 112, 120, 165, 163, 136, 125, 92, 195, 182, 99, 106, 220]) + let reader = NALUnitReader() + let units = reader.read(&data, type: H264NALUnit.self) + let sps = 
units.first(where: { $0.type == .sps }) + #expect(sps?.data.bytes == [103, 77, 64, 13, 218, 5, 7, 236, 4, 64, 0, 0, 3, 0, 64, 0, 0, 7, 131, 197, 10, 168]) + #expect(reader.makeFormatDescription(&data, type: .h264) != nil) + } + + @Test func slice_startCode3() { + var data = Data([0, 0, 1, 65, 226, 8, 13, 224, 179, 253, 15, 80, 87, 254, 170, 10, 255, 213, 65, 95, 250, 168, 43, 255, 85, 5, 127, 234, 160, 175, 253, 84, 21, 255, 170, 130, 10, 197, 255, 170, 134, 250, 37, 66, 31, 232, 170, 28, 59, 199, 255, 170, 135, 42, 122, 250, 11, 143, 255, 85, 5, 127, 234, 160, 175, 253, 84, 43, 208, 134, 33, 111, 244, 37, 66, 221, 88, 190, 147, 21, 200, 236, 47, 210, 127, 166, 196, 119, 250, 168, 43, 255, 85, 5, 127, 234, 160, 175, 253, 84, 21, 255, 170, 130, 191, 245, 80, 87, 254, 170, 10, 255, 213, 65, 95, 250, 168, 43, 248]) + let reader = NALUnitReader() + let units = reader.read(&data, type: H264NALUnit.self) + #expect([65, 226, 8, 13] == units.first?.data.bytes[0..<4]) + #expect(units.contains(where: { $0.type == .slice })) + } + + @Test func spsPPS_startCode3() { + var data = Data([0, 0, 1, 39, 66, 0, 30, 171, 64, 88, 25, 242, 203, 53, 1, 1, 1, 2, 0, 0, 1, 40, 206, 60, 128, 0, 0, 1, 39, 66, 0, 30, 171, 64, 88, 25, 242, 203, 53, 1, 1, 1, 2, 0, 0, 1, 40, 206, 60, 128, 0, 0, 1, 101, 184, 32, 3, 255, 255, 254, 30, 30, 40, 0, 8, 162, 251, 239, 190, 251, 239, 190, 251, 239, 190, 251, 239, 190, 251, 239, 190, 251, 239, 190, 251, 239, 190, 251, 239, 190, 251, 239, 190, 251, 239, 190, 251, 239, 190, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 
174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 175, 255, 227, 240, 65, 192, 1, 16, 101, 17, 204, 73, 101, 165, 194, 60, 154, 126, 49, 8, 164, 150, 125, 247, 223, 125, 247, 223, 125, 247, 223, 125, 247, 223, 127, 255, 138, 252, 16, 120, 0, 55, 178, 27, 153, 226, 14, 166, 169, 75, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 193, 63, 255, 134, 166, 0, 0, 128, 200, 0, 8, 17, 0, 1, 96, 242, 96, 19, 129, 190, 129, 218, 96, 235, 136, 1, 198, 4, 169, 254, 196, 184, 65, 20, 5, 128, 176, 30, 2, 0, 160, 2, 32, 166, 47, 196, 70, 248, 176, 128, 1, 5, 23, 44, 129, 0, 128, 61, 204, 4, 163, 208, 131, 132, 72, 82, 216, 66, 185, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 224, 31, 255, 96, 174, 0, 9, 168, 2, 235, 144, 14, 120, 97, 73, 182, 124, 161, 0, 0, 32, 32, 0, 2, 12, 64, 0, 32, 65, 65, 134, 48, 150, 0, 2, 1, 128, 6, 2, 0, 19, 128, 18, 51, 13, 97, 202, 112, 8, 110, 242, 1, 128, 6, 185, 128, 130, 164, 24, 117, 238, 64, 27, 154, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 235, 174, 186, 233, 235, 174, 186, 235, 175, 255, 255, 130, 239, 0, 7, 24, 27, 56, 90, 108, 195, 52, 5, 156, 12, 212, 90, 62, 1, 23, 128, 8, 35, 201, 76, 218, 71, 9, 31, 159, 253, 221, 199, 65, 220, 0, 24, 152, 156, 8, 30, 102, 87, 136, 225, 17, 170, 242, 109, 0, 88, 97, 19, 133, 35, 14, 179, 217, 217, 188, 212, 50, 138, 112, 86, 161, 151, 215, 56, 73, 126, 108, 130, 28, 45, 172, 222, 5, 7, 27, 252, 215, 255, 121, 11, 86, 3, 2, 86, 156, 239, 209, 189, 183, 117, 226, 29, 21, 192, 236, 87, 16, 153, 48, 100, 226, 90, 243, 160, 0, 100, 129, 102, 20, 1, 17, 64, 38, 251, 224, 128, 1, 0, 25, 192, 240, 160, 62, 54, 11, 90, 25, 108, 164, 6, 82, 223, 19, 200, 0, 4, 0, 136, 131, 97, 133, 147, 194, 128, 35, 135, 110, 31, 255, 234, 131, 191, 85, 162, 0, 1, 0, 61, 111, 224, 82, 52, 69, 101, 187, 238, 215, 255, 131, 0, 79, 136, 142, 24, 190, 138, 230, 192, 9, 218, 71, 116]) + let reader = 
NALUnitReader() + #expect(reader.makeFormatDescription(&data, type: .h264) != nil) + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/SRT/SRTConnectionTests.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/SRT/SRTConnectionTests.swift new file mode 100644 index 000000000..2dfd89d9e --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/SRT/SRTConnectionTests.swift @@ -0,0 +1,35 @@ +import Foundation +import Testing + +import libsrt +@testable import SRTHaishinKit + +@Suite struct SRTConnectionTests { + @Test func streamid_success() async throws { + Task { + let listener = SRTConnection() + try await listener.connect(URL(string: "srt://:10000?streamid=test")) + } + Task { + let connection = SRTConnection() + try await Task.sleep(nanoseconds: UInt64(1 * 1_000_000_000)) + try await connection.connect(URL(string: "srt://127.0.0.1:10000?streamid=test")) + await connection.close() + } + } + + @Test func streamid_failed() async throws { + Task { + let listener = SRTConnection() + try await listener.connect(URL(string: "srt://:10001?streamid=test&passphrase=a546994dbf25a0823f0cbadff9cc5088k9e7c2027e8e40933a04ef574bc61cd4a")) + } + Task { + let connection = SRTConnection() + try await Task.sleep(nanoseconds: UInt64(1 * 1_000_000_000)) + await #expect(throws: SRTConnection.Error.self) { + try await connection.connect(URL(string: "srt://127.0.0.1:10001?streamid=test2&passphrase=a546994dbf25a0823f0cbadff9cc5088k9e7c2027e8e40933a04ef574bc61cd4")) + } + await connection.close() + } + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/SRT/SRTSocketOptionTests.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/SRT/SRTSocketOptionTests.swift new file mode 100644 index 000000000..a728ff3cc --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/SRT/SRTSocketOptionTests.swift @@ -0,0 +1,89 @@ +import Foundation +import Testing + +import libsrt +@testable import SRTHaishinKit + +@Suite actor SRTSocketOptionTests { + @Test func parseUri() 
async { + guard + let url = SRTSocketURL(URL(string: "srt://localhost:9000?passphrase=1234&streamid=5678&latency=1935&sndsyn=1&transtype=file")) else { + return + } + let options = url.options + #expect(options.first { $0.name == .passphrase }?.stringValue == "1234") + #expect(options.first { $0.name == .streamid }?.stringValue == "5678") + #expect(options.first { $0.name == .latency }?.intValue == 1935) + #expect(options.first { $0.name == .sndsyn }?.boolValue == true) + #expect(options.first { $0.name == .transtype }?.stringValue == SRTT_FILE.rawValue.description) + } + + @Test func string() throws { + let socket = srt_create_socket() + let expect = try SRTSocketOption(name: .streamid, value: "hello") + try? expect.setSockflag(socket) + #expect(try SRTSocketOption(name: .streamid, socket: socket).stringValue == "hello") + srt_close(socket) + } + + @Test func int32() throws { + let socket = srt_create_socket() + let expect = try SRTSocketOption(name: .latency, value: "100") + try? expect.setSockflag(socket) + #expect(try SRTSocketOption(name: .latency, socket: socket).intValue == 100) + srt_close(socket) + } + + @Test func int64() throws { + let socket = srt_create_socket() + let expect = try SRTSocketOption(name: .inputbw, value: "1000") + try? expect.setSockflag(socket) + #expect(try SRTSocketOption(name: .inputbw, socket: socket).intValue == 1000) + srt_close(socket) + } + + @Test func bool() throws { + let socket = srt_create_socket() + let expect = try SRTSocketOption(name: .tlpktdrop, value: "true") + try? expect.setSockflag(socket) + #expect(try SRTSocketOption(name: .tlpktdrop, socket: socket).boolValue == true) + srt_close(socket) + } + + @Test func transtype() throws { + let socket = srt_create_socket() + // The default is true for Live mode, and false for File mode. + // It does not support transtype.getOption, so I will test it by observing changes in the surrounding properties. 
+ #expect(try SRTSocketOption(name: .nakreport, socket: socket).boolValue == true) + let expect = try SRTSocketOption(name: .transtype, value: "file") + try? expect.setSockflag(socket) + #expect(try SRTSocketOption(name: .nakreport, socket: socket).boolValue == false) + srt_close(socket) + } + + @Test func connection() async throws { + let connection = SRTConnection() + let option = try SRTSocketOption(name: .nakreport, value: "no") + try await connection.setSocketOption(option) + let result = try await connection.getSocketOption(.nakreport) + #expect(result?.boolValue == false) + } + + @Test func rendezvous() throws { + guard + let url = SRTSocketURL(URL(string: "srt://:9000?adapter=0.0.0.0")) else { + return + } + #expect(url.local != nil) + #expect(url.mode == SRTMode.rendezvous) + } + + @Test func mode() throws { + #expect(SRTSocketURL(URL(string: "srt://192.168.1.1:9000?mode=caller"))?.mode == SRTMode.caller) + #expect(SRTSocketURL(URL(string: "srt://192.168.1.1:9000?mode=client"))?.mode == SRTMode.caller) + #expect(SRTSocketURL(URL(string: "srt://192.168.1.1:9000?mode=listener"))?.mode == SRTMode.listener) + #expect(SRTSocketURL(URL(string: "srt://192.168.1.1:9000?mode=server"))?.mode == SRTMode.listener) + #expect(SRTSocketURL(URL(string: "srt://192.168.1.1:9000"))?.mode == SRTMode.caller) + #expect(SRTSocketURL(URL(string: "srt://:9000"))?.mode == SRTMode.listener) + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/SRT/SRTStreamTests.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/SRT/SRTStreamTests.swift new file mode 100644 index 000000000..6918bc364 --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/SRT/SRTStreamTests.swift @@ -0,0 +1,15 @@ +import Foundation +import HaishinKit +@testable import SRTHaishinKit +import Testing + +@Suite actor SRTStreamTests { + @Test func unsupportedAudioCodec() async { + await #expect(throws: SRTStream.Error.unsupportedCodec.self) { + let stream = SRTStream(connection: .init()) + var audio = 
AudioCodecSettings() + audio.format = .opus + try await stream.setAudioSettings(audio) + } + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/TS/CRC32Tests.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/TS/CRC32Tests.swift new file mode 100644 index 000000000..357834b0e --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/TS/CRC32Tests.swift @@ -0,0 +1,78 @@ +import Foundation +import Testing + +@testable import SRTHaishinKit + +@Suite struct CRC32Tests { + static let tableOfMpeg2: [UInt32] = [ + 0x00000000, 0x04c11db7, 0x09823b6e, 0x0d4326d9, + 0x130476dc, 0x17c56b6b, 0x1a864db2, 0x1e475005, + 0x2608edb8, 0x22c9f00f, 0x2f8ad6d6, 0x2b4bcb61, + 0x350c9b64, 0x31cd86d3, 0x3c8ea00a, 0x384fbdbd, + 0x4c11db70, 0x48d0c6c7, 0x4593e01e, 0x4152fda9, + 0x5f15adac, 0x5bd4b01b, 0x569796c2, 0x52568b75, + 0x6a1936c8, 0x6ed82b7f, 0x639b0da6, 0x675a1011, + 0x791d4014, 0x7ddc5da3, 0x709f7b7a, 0x745e66cd, + 0x9823b6e0, 0x9ce2ab57, 0x91a18d8e, 0x95609039, + 0x8b27c03c, 0x8fe6dd8b, 0x82a5fb52, 0x8664e6e5, + 0xbe2b5b58, 0xbaea46ef, 0xb7a96036, 0xb3687d81, + 0xad2f2d84, 0xa9ee3033, 0xa4ad16ea, 0xa06c0b5d, + 0xd4326d90, 0xd0f37027, 0xddb056fe, 0xd9714b49, + 0xc7361b4c, 0xc3f706fb, 0xceb42022, 0xca753d95, + 0xf23a8028, 0xf6fb9d9f, 0xfbb8bb46, 0xff79a6f1, + 0xe13ef6f4, 0xe5ffeb43, 0xe8bccd9a, 0xec7dd02d, + 0x34867077, 0x30476dc0, 0x3d044b19, 0x39c556ae, + 0x278206ab, 0x23431b1c, 0x2e003dc5, 0x2ac12072, + 0x128e9dcf, 0x164f8078, 0x1b0ca6a1, 0x1fcdbb16, + 0x018aeb13, 0x054bf6a4, 0x0808d07d, 0x0cc9cdca, + 0x7897ab07, 0x7c56b6b0, 0x71159069, 0x75d48dde, + 0x6b93dddb, 0x6f52c06c, 0x6211e6b5, 0x66d0fb02, + 0x5e9f46bf, 0x5a5e5b08, 0x571d7dd1, 0x53dc6066, + 0x4d9b3063, 0x495a2dd4, 0x44190b0d, 0x40d816ba, + 0xaca5c697, 0xa864db20, 0xa527fdf9, 0xa1e6e04e, + 0xbfa1b04b, 0xbb60adfc, 0xb6238b25, 0xb2e29692, + 0x8aad2b2f, 0x8e6c3698, 0x832f1041, 0x87ee0df6, + 0x99a95df3, 0x9d684044, 0x902b669d, 0x94ea7b2a, + 0xe0b41de7, 0xe4750050, 0xe9362689, 0xedf73b3e, + 0xf3b06b3b, 
0xf771768c, 0xfa325055, 0xfef34de2, + 0xc6bcf05f, 0xc27dede8, 0xcf3ecb31, 0xcbffd686, + 0xd5b88683, 0xd1799b34, 0xdc3abded, 0xd8fba05a, + 0x690ce0ee, 0x6dcdfd59, 0x608edb80, 0x644fc637, + 0x7a089632, 0x7ec98b85, 0x738aad5c, 0x774bb0eb, + 0x4f040d56, 0x4bc510e1, 0x46863638, 0x42472b8f, + 0x5c007b8a, 0x58c1663d, 0x558240e4, 0x51435d53, + 0x251d3b9e, 0x21dc2629, 0x2c9f00f0, 0x285e1d47, + 0x36194d42, 0x32d850f5, 0x3f9b762c, 0x3b5a6b9b, + 0x0315d626, 0x07d4cb91, 0x0a97ed48, 0x0e56f0ff, + 0x1011a0fa, 0x14d0bd4d, 0x19939b94, 0x1d528623, + 0xf12f560e, 0xf5ee4bb9, 0xf8ad6d60, 0xfc6c70d7, + 0xe22b20d2, 0xe6ea3d65, 0xeba91bbc, 0xef68060b, + 0xd727bbb6, 0xd3e6a601, 0xdea580d8, 0xda649d6f, + 0xc423cd6a, 0xc0e2d0dd, 0xcda1f604, 0xc960ebb3, + 0xbd3e8d7e, 0xb9ff90c9, 0xb4bcb610, 0xb07daba7, + 0xae3afba2, 0xaafbe615, 0xa7b8c0cc, 0xa379dd7b, + 0x9b3660c6, 0x9ff77d71, 0x92b45ba8, 0x9675461f, + 0x8832161a, 0x8cf30bad, 0x81b02d74, 0x857130c3, + 0x5d8a9099, 0x594b8d2e, 0x5408abf7, 0x50c9b640, + 0x4e8ee645, 0x4a4ffbf2, 0x470cdd2b, 0x43cdc09c, + 0x7b827d21, 0x7f436096, 0x7200464f, 0x76c15bf8, + 0x68860bfd, 0x6c47164a, 0x61043093, 0x65c52d24, + 0x119b4be9, 0x155a565e, 0x18197087, 0x1cd86d30, + 0x029f3d35, 0x065e2082, 0x0b1d065b, 0x0fdc1bec, + 0x3793a651, 0x3352bbe6, 0x3e119d3f, 0x3ad08088, + 0x2497d08d, 0x2056cd3a, 0x2d15ebe3, 0x29d4f654, + 0xc5a92679, 0xc1683bce, 0xcc2b1d17, 0xc8ea00a0, + 0xd6ad50a5, 0xd26c4d12, 0xdf2f6bcb, 0xdbee767c, + 0xe3a1cbc1, 0xe760d676, 0xea23f0af, 0xeee2ed18, + 0xf0a5bd1d, 0xf464a0aa, 0xf9278673, 0xfde69bc4, + 0x89b8fd09, 0x8d79e0be, 0x803ac667, 0x84fbdbd0, + 0x9abc8bd5, 0x9e7d9662, 0x933eb0bb, 0x97ffad0c, + 0xafb010b1, 0xab710d06, 0xa6322bdf, 0xa2f33668, + 0xbcb4666d, 0xb8757bda, 0xb5365d03, 0xb1f740b4 + ] + + @Test func mPEG2() { + #expect(CRC32.mpeg2.table == CRC32Tests.tableOfMpeg2) + #expect(716244146 == CRC32.mpeg2.calculate(Data([0, 176, 13, 0, 1, 193, 0, 0, 0, 1, 240, 0]))) + } +} diff --git 
a/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/TS/ESSpecificDataTests.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/TS/ESSpecificDataTests.swift new file mode 100644 index 000000000..bdf6c233f --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/TS/ESSpecificDataTests.swift @@ -0,0 +1,22 @@ +import CoreMedia +import Foundation +import Testing + +@testable import SRTHaishinKit + +@Suite struct ESSpecificDataTests { + private let AACData = Data([15, 225, 1, 240, 6, 10, 4, 117, 110, 100, 0]) + private let H264Data = Data([27, 225, 0, 240, 0, 15, 225, 1, 240, 6, 10, 4, 117, 110, 100, 0]) + + @Test func readAACData() { + let data = ESSpecificData(AACData) + #expect(data?.streamType == .adtsAac) + #expect(data?.elementaryPID == 257) + } + + @Test func readH264Data() { + let data = ESSpecificData(H264Data) + #expect(data?.streamType == .h264) + #expect(data?.elementaryPID == 256) + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/TS/PacketizedElementaryStreamTests.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/TS/PacketizedElementaryStreamTests.swift new file mode 100644 index 000000000..2acdfdaa9 --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/TS/PacketizedElementaryStreamTests.swift @@ -0,0 +1,25 @@ +import CoreMedia +import Foundation +import Testing + +@testable import SRTHaishinKit + +@Suite struct PacketizedElementaryStreamTests { + static let dataWithVideo: Data = .init([0, 0, 1, 224, 0, 0, 128, 128, 5, 33, 0, 7, 216, 97, 0, 0, 0, 1, 9, 240, 0, 0, 0, 1, 103, 77, 64, 13, 218, 5, 7, 236, 4, 64, 0, 0, 3, 0, 64, 0, 0, 7, 131, 197, 10, 168, 0, 0, 0, 1, 104, 239, 60, 128, 0, 0, 0, 1, 101, 136, 130, 1, 15, 250, 120, 30, 255, 244, 55, 157, 215, 115, 255, 239, 112, 39, 83, 211, 17, 103, 152, 229, 241, 131, 49, 7, 123, 10, 145, 184, 0, 0, 3, 3, 133, 122, 49, 20, 214, 115, 51, 202, 59, 43, 204, 79, 27, 229, 101, 135, 60, 234, 243, 78, 210, 98, 30, 252, 36, 38, 20, 202, 41, 121, 70, 45, 15, 54, 125, 153, 199, 236, 90, 142, 247, 
27, 202, 17, 205, 77, 133, 21, 189, 212, 159, 87, 222, 100, 53, 75, 211, 139, 219, 83, 89, 59, 199, 242, 182, 18, 245, 72, 70, 50, 230, 58, 82, 122, 179, 121, 243, 232, 107, 206, 157, 13]) + + static let dataWithVideo2: Data = .init([0, 0, 1, 224, 2, 209, 132, 128, 5, 33, 0, 7, 146, 37, 0, 0, 0, 1, 9, 48, 0, 0, 0, 1, 33, 228, 144, 120, 102, 98, 87, 51, 222, 110, 218, 124, 79, 47, 168, 200, 143, 213, 67, 79, 163, 46, 254, 103, 220, 52, 39, 107, 213, 75, 201, 222, 47, 147, 216, 194, 11, 184, 142, 74, 241, 188, 221, 254, 170, 27, 111, 81, 253, 251, 139, 255, 85, 39, 122, 224, 199, 253, 84, 180, 166, 17, 223, 114, 126, 239, 19, 168, 111, 253, 84, 119, 125, 58, 189, 119, 177, 226, 45, 138, 245, 165, 134, 98, 245, 215, 190, 170, 12, 185, 109, 247, 199, 233, 105, 37, 234, 190, 170, 55, 148, 215, 252, 167, 79, 173, 197, 243, 122, 98, 121, 123, 196, 99, 34, 127, 213, 67, 124, 186, 229, 228, 210, 198, 242, 90, 226, 63, 213, 70, 115, 107, 69, 225, 30, 244, 5, 96, 90, 15, 45, 226, 187, 247, 23, 205, 190, 42, 85, 39, 250, 169, 56, 190, 92, 46, 105, 59, 228, 175, 55, 37, 107, 27, 223, 120, 79, 37, 247, 190, 81, 63, 203, 127, 226, 253, 157, 255, 55, 74, 144, 213, 143, 191, 179, 96, 251, 229, 230, 223, 93, 232, 255, 46, 255, 151, 127, 203, 233, 141, 196, 196, 243, 123, 99, 185, 54, 247, 254, 170, 184, 154, 46, 135, 205, 203, 233, 45, 14, 191, 189, 239, 196, 105, 120, 189, 247, 254, 233, 247, 221, 175, 228, 175, 19, 222, 222, 51, 113, 255, 234, 164, 235, 208, 175, 46, 248, 158, 189, 9, 117, 239, 252, 71, 75, 191, 203, 123, 87, 213, 145, 61, 223, 190, 90, 255, 86, 137, 230, 239, 87, 174, 95, 67, 226, 175, 115, 64, 163, 149, 98, 113, 31, 234, 164, 231, 179, 250, 215, 39, 99, 155, 151, 92, 131, 137, 188, 189, 93, 125, 244, 161, 223, 245, 82, 243, 117, 147, 147, 212, 71, 39, 185, 249, 40, 127, 201, 219, 249, 47, 223, 116, 95, 205, 62, 112, 191, 53, 223, 245, 95, 21, 43, 28, 176, 235, 147, 175, 205, 126, 26, 125, 27, 188, 184, 168, 67, 189, 245, 254, 171, 229, 246, 151, 55, 191, 154, 
253, 189, 225, 78, 8, 182, 91, 205, 213, 235, 190, 138, 106, 201, 223, 110, 35, 252, 185, 132, 115, 245, 232, 71, 147, 107, 39, 23, 211, 107, 106, 249, 125, 198, 10, 226, 191, 213, 90, 151, 125, 59, 224, 189, 116, 93, 137, 125, 190, 185, 94, 142, 249, 45, 241, 61, 250, 9, 60, 98, 153, 191, 124, 18, 236, 246, 108, 30, 199, 196, 247, 123, 244, 39, 132, 175, 244, 165, 196, 71, 114, 251, 255, 205, 160, 63, 203, 191, 228, 244, 159, 126, 215, 150, 252, 253, 239, 151, 155, 181, 21, 201, 229, 97, 114, 213, 189, 114, 102, 35, 251, 210, 254, 106, 46, 72, 209, 162, 232, 190, 156, 119, 123, 255, 244, 49, 95, 56, 213, 253, 254, 106, 3, 227, 249, 122, 95, 125, 126, 246, 122, 238, 188, 188, 139, 123, 237, 116, 252, 133, 218, 102, 249, 123, 82, 68, 137, 44, 217, 199, 126, 83, 42, 169, 77, 19, 215, 164, 229, 239, 247, 217, 199, 54, 122, 250, 244, 71, 126, 190, 110, 156, 65, 244, 17, 57, 189, 252, 189, 254, 170, 151, 146, 252, 127, 54, 248, 222, 77, 241, 58, 190, 79, 81, 125, 247, 250, 169, 184, 141, 254, 227, 59, 217, 126, 41, 119, 176, 255, 43, 97, 241, 84, 98, 244, 171, 151, 162, 251, 223, 125, 236, 178, 242, 223, 249, 118, 94, 205, 63, 45, 251, 253, 163, 216, 250, 62, 93, 77, 205, 181, 159, 149, 116, 194, 28, 206, 143, 220, 189, 151, 203, 165, 159, 150, 130, 231, 230, 216, 247, 254, 239, 205, 222, 190, 125, 77, 159, 147, 223, 202, 168, 44, 156, 91, 99, 250, 41, 121, 125, 162, 119, 238, 78, 47, 93, 122, 229, 35, 11, 249, 74, 203, 153, 182, 82, 252, 190, 72, 27, 191, 119, 254, 171, 220, 157, 239, 137, 230, 223, 124, 180, 92, 33, 218, 74, 191, 16, 232, 123, 46, 185, 189, 3, 92, 157, 235, 151, 166, 136, 156, 189, 235, 155, 188, 255]) + + @Test func videoData() { + let pes = PacketizedElementaryStream(PacketizedElementaryStreamTests.dataWithVideo)! 
+ let header = pes.optionalPESHeader + let timingInfo = header?.makeSampleTimingInfo(.invalid) + #expect(timingInfo?.presentationTimeStamp == CMTime(value: 126384, timescale: CMTimeScale(TSTimestamp.resolution))) + #expect(pes.payload == PacketizedElementaryStreamTests.dataWithVideo) + } + + @Test func videoData2() { + let pes = PacketizedElementaryStream(PacketizedElementaryStreamTests.dataWithVideo2)! + let packet = pes.arrayOfPackets(1, PCR: 2).count + #expect(packet == 4) + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/TS/TSPacketTests.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/TS/TSPacketTests.swift new file mode 100644 index 000000000..c27c35893 --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/TS/TSPacketTests.swift @@ -0,0 +1,28 @@ +import Foundation +import Testing + +@testable import SRTHaishinKit + +@Suite struct TSPacketTests { + static let dataWithMetadata: Data = .init([71, 64, 17, 16, 0, 66, 240, 37, 0, 1, 193, 0, 0, 0, 1, 255, 0, 1, 252, 128, 20, 72, 18, 1, 6, 70, 70, 109, 112, 101, 103, 9, 83, 101, 114, 118, 105, 99, 101, 48, 49, 167, 121, 160, 3, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255]) + + @Test func packet() { + let packetWithMetadata = TSPacket(data: TSPacketTests.dataWithMetadata)! 
+ #expect(packetWithMetadata.syncByte == TSPacket.defaultSyncByte) + #expect(packetWithMetadata.pid == 17) + #expect(packetWithMetadata.data == TSPacketTests.dataWithMetadata) + } + + @Test func programClockReference() { + let data = Data([0, 1, 66, 68, 126, 0]) + let (b, e) = TSProgramClockReference.decode(data) + #expect(data == TSProgramClockReference.encode(b, e)) + } + + @Test func timestamp() { + #expect(0 == TSTimestamp.decode(Data([49, 0, 1, 0, 1]))) + #expect(0 == TSTimestamp.decode(Data([17, 0, 1, 0, 1]))) + #expect(Data([49, 0, 1, 0, 1]) == TSTimestamp.encode(0, TSTimestamp.ptsDtsMask)) + #expect(Data([17, 0, 1, 0, 1]) == TSTimestamp.encode(0, TSTimestamp.ptsMask)) + } +} diff --git a/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/TS/TSProgramTests.swift b/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/TS/TSProgramTests.swift new file mode 100644 index 000000000..4ad7cda42 --- /dev/null +++ b/Vendor/HaishinKit.swift/SRTHaishinKit/Tests/TS/TSProgramTests.swift @@ -0,0 +1,20 @@ +import Foundation +import Testing + +@testable import SRTHaishinKit + +@Suite struct TSProgramTests { + static let dataForPAT: Data = .init([0, 0, 176, 13, 0, 1, 193, 0, 0, 0, 1, 240, 0, 42, 177, 4, 178]) + static let dataForPMT: Data = .init([0, 2, 176, 29, 0, 1, 193, 0, 0, 225, 0, 240, 0, 27, 225, 0, 240, 0, 15, 225, 1, 240, 6, 10, 4, 117, 110, 100, 0, 8, 125, 232, 119]) + + @Test func pat() { + let pat = TSProgramAssociation(TSProgramTests.dataForPAT)! + #expect(pat.programs == [1: 4096]) + #expect(pat.data == TSProgramTests.dataForPAT) + } + + @Test func pmt() { + let pmt = TSProgramMap(TSProgramTests.dataForPMT)! 
+ #expect(pmt.data == TSProgramTests.dataForPMT) + } +} diff --git a/Vendor/HaishinKit.swift/fastlane/Appfile b/Vendor/HaishinKit.swift/fastlane/Appfile new file mode 100644 index 000000000..180306309 --- /dev/null +++ b/Vendor/HaishinKit.swift/fastlane/Appfile @@ -0,0 +1,6 @@ +# app_identifier("[[APP_IDENTIFIER]]") # The bundle identifier of your app +# apple_id("[[APPLE_ID]]") # Your Apple email address + + +# For more information about the Appfile, see: +# https://docs.fastlane.tools/advanced/#appfile diff --git a/Vendor/HaishinKit.swift/fastlane/Fastfile b/Vendor/HaishinKit.swift/fastlane/Fastfile new file mode 100644 index 000000000..1d5e724bb --- /dev/null +++ b/Vendor/HaishinKit.swift/fastlane/Fastfile @@ -0,0 +1,66 @@ +desc 'Review PullRequest.' +lane :review do + + spm( + command: 'test', + package_path: 'HaishinKit' + ) + + spm( + command: 'test', + package_path: 'SRTHaishinKit' + ) + + begin + danger( + danger_id: 'danger', + dangerfile: 'Dangerfile', + verbose: true + ) + rescue StandardError + end +end + +desc 'Creates documents.' +lane :document do |options| + if options[:version] == '' + UI.error('Version is a required argument!') + next + end + + version = options[:version] + + Dir.chdir('..') do + sh 'git clone git@github.com:HaishinKit/docs.haishinkit.com.git' + + if Helper.is_ci? + sh "git config --global user.email 'action@github.com'" + sh "git config --global user.name 'GitHub Action'" + end + + sh "rm -rf ./docs.haishinkit.com/swift/#{version}" + sh <