From da4fff405791d20cf0dbc45e4bfb55ddfab60222 Mon Sep 17 00:00:00 2001 From: Filipi Fuchter Date: Mon, 20 Oct 2025 15:58:26 -0300 Subject: [PATCH] Pipecat client iOS SmallWebRTC 1.1.0 spec. --- .swift-format | 54 +++ .swiftlint | 113 ++++++ Package.resolved | 4 +- Package.swift | 2 +- .../SmallWebRTCConnection.swift | 360 +++++++++++------- .../SmallWebRTCTransport.swift | 321 +++++++++------- .../NSLayoutConstraint+Extensions.swift | 4 +- .../extensions/RTCAudioTrack.swift | 7 +- .../extensions/RTCVideoTrack.swift | 7 +- .../extensions/RTVIExtensions.swift | 37 -- .../types/Device.swift | 22 +- .../types/SessionDescription.swift | 28 +- .../types/SignallingMessage.swift | 103 ++++- ...SmallWebRTCTransportConnectionParams.swift | 21 + .../util/AudioManager.swift | 183 ++++----- .../util/Logger.swift | 24 +- .../SmallWebRTCTransportTrackExtension.swift | 130 +++++++ .../util/VideoManager.swift | 2 +- .../view/SmallWebRTCVideoView.swift | 6 +- .../view/SmallWebRTCVideoViewSwiftUI.swift | 2 +- .../view/VideoView.swift | 4 +- scripts/formatCode.sh | 5 + 22 files changed, 987 insertions(+), 452 deletions(-) create mode 100644 .swift-format create mode 100644 .swiftlint delete mode 100644 Sources/PipecatClientIOSSmallWebrtc/extensions/RTVIExtensions.swift create mode 100644 Sources/PipecatClientIOSSmallWebrtc/types/SmallWebRTCTransportConnectionParams.swift create mode 100644 Sources/PipecatClientIOSSmallWebrtc/util/SmallWebRTCTransportTrackExtension.swift create mode 100755 scripts/formatCode.sh diff --git a/.swift-format b/.swift-format new file mode 100644 index 0000000..8b37c7d --- /dev/null +++ b/.swift-format @@ -0,0 +1,54 @@ +{ + "fileScopedDeclarationPrivacy": { + "accessLevel": "private" + }, + "indentation": { + "spaces": 4 + }, + "indentConditionalCompilationBlocks": false, + "indentSwitchCaseLabels": false, + "lineBreakAroundMultilineExpressionChainComponents": true, + "lineBreakBeforeControlFlowKeywords": false, + "lineBreakBeforeEachArgument": true, + "lineBreakBeforeEachGenericRequirement": true, + "lineLength": 120, + "maximumBlankLines": 1, + "prioritizeKeepingFunctionOutputTogether": false, + "respectsExistingLineBreaks": true, + "rules": { + "AllPublicDeclarationsHaveDocumentation": false, + "AlwaysUseLowerCamelCase": true, + "AmbiguousTrailingClosureOverload": true, + "BeginDocumentationCommentWithOneLineSummary": false, + "DoNotUseSemicolons": true, + "DontRepeatTypeInStaticProperties": true, + "FileScopedDeclarationPrivacy": true, + "FullyIndirectEnum": true, + "GroupNumericLiterals": true, + "IdentifiersMustBeASCII": true, + "NeverForceUnwrap": false, + "NeverUseForceTry": false, + "NeverUseImplicitlyUnwrappedOptionals": false, + "NoAccessLevelOnExtensionDeclaration": true, + "NoBlockComments": true, + "NoCasesWithOnlyFallthrough": true, + "NoEmptyTrailingClosureParentheses": true, + "NoLabelsInCasePatterns": true, + "NoLeadingUnderscores": false, + "NoParensAroundConditions": true, + "NoVoidReturnOnFunctionSignature": true, + "OneCasePerLine": true, + "OneVariableDeclarationPerLine": true, + "OnlyOneTrailingClosureArgument": true, + "OrderedImports": false, + "ReturnVoidInsteadOfEmptyTuple": true, + "UseLetInEveryBoundCaseVariable": true, + "UseShorthandTypeNames": true, + "UseSingleLinePropertyGetter": true, + "UseSynthesizedInitializer": true, + "UseTripleSlashForDocumentationComments": true, + "ValidateDocumentationComments": false + }, + "tabWidth": 4, + "version": 1 +} diff --git a/.swiftlint b/.swiftlint new file mode 100644 index 0000000..e067fed --- /dev/null +++ 
b/.swiftlint @@ -0,0 +1,113 @@ +# By default, SwiftLint uses a set of sensible default rules you can adjust: + +disabled_rules: + - redundant_optional_initialization # nope, consistency/explicitness > brevity! + - empty_enum_arguments # nope, consistency/explicitness > brevity! + - closure_parameter_position + - trailing_comma # just nope, this is a feature, not a bug! + - redundant_string_enum_value # nope, consistency/explicitness > brevity! + - unused_closure_parameter + - opening_brace # nope, since this prevents proper formatting of `where` constraints + - redundant_discardable_let + +opt_in_rules: + - closure_body_length + - closure_end_indentation + - closure_spacing + - collection_alignment + - contains_over_filter_count + - contains_over_filter_is_empty + - contains_over_first_not_nil + - contains_over_range_nil_comparison + - convenience_type + - discouraged_object_literal + - discouraged_optional_collection + - empty_count + - empty_string + - fallthrough + - file_name_no_space + - first_where + - flatmap_over_map_reduce + - force_unwrapping + - function_default_parameter_at_end + - implicit_return + - implicitly_unwrapped_optional + - indentation_width + - joined_default_parameter + - last_where + - legacy_multiple + - legacy_random + - literal_expression_end_indentation + - multiline_function_chains + - no_extension_access_modifier + - operator_usage_whitespace + - private_action + - private_outlet + - redundant_set_access_control + - sorted_first_last + - switch_case_on_newline + - unneeded_parentheses_in_closure_argument + - unowned_variable_capture + - vertical_whitespace_opening_braces + +# Rules run by `swiftlint analyze` (experimental) +analyzer_rules: + - explicit_self + - unused_import + +force_cast: warning # implicitly +force_try: + severity: warning # explicitly + +line_length: + warning: 120 + error: 150 + +type_body_length: + - 300 # warning + - 400 # error + +file_length: + warning: 750 + error: 1000 + +type_name: + min_length: 4 # only warning + max_length: # warning and error + warning: 50 + error: 70 + excluded: + - Key + - ID + allowed_symbols: ["_"] # these are allowed in type names + +identifier_name: + min_length: # only min_length + warning: 2 # only warn + excluded: # excluded via string array + - i + - x + - y + - id + +indentation_width: + indentation_width: 4 + +custom_rules: + # https://www.swift.org/documentation/api-design-guidelines/#follow-case-conventions + uniform_casing_for_id: + name: "Use id or ID instead of Id" + regex: '(? Void)? + private var localAudioTrack: RTCAudioTrack? private var remoteAudioTrack: RTCAudioTrack? - + private var videoCapturer: RTCVideoCapturer? private var localVideoTrack: RTCVideoTrack? private var remoteVideoTrack: RTCVideoTrack? - + private var iceGatheringCompleted = false - + private var enableCam: Bool private var enableMic: Bool - + @available(*, unavailable) override init() { fatalError("SmallWebRTCConnection:init is unavailable") } - + required init(iceServers: [String], enableCam: Bool, enableMic: Bool) { self.enableCam = enableCam self.enableMic = enableMic - + let config = RTCConfiguration() if !iceServers.isEmpty { config.iceServers = [RTCIceServer(urlStrings: iceServers)] } - + // Unified plan is more superior than planB config.sdpSemantics = .unifiedPlan - + // gatherContinually will let WebRTC to listen to any network changes and send any new candidates to the other client config.continualGatheringPolicy = .gatherOnce - + // Define media constraints. 
DtlsSrtpKeyAgreement is required to be true to be able to connect with web browsers. - let constraints = RTCMediaConstraints(mandatoryConstraints: nil, - optionalConstraints: ["DtlsSrtpKeyAgreement":kRTCMediaConstraintsValueTrue]) - - guard let peerConnection = SmallWebRTCConnection.factory.peerConnection(with: config, constraints: constraints, delegate: nil) else { + let constraints = RTCMediaConstraints( + mandatoryConstraints: nil, + optionalConstraints: ["DtlsSrtpKeyAgreement": kRTCMediaConstraintsValueTrue] + ) + + guard + let peerConnection = SmallWebRTCConnection.factory.peerConnection( + with: config, + constraints: constraints, + delegate: nil + ) + else { fatalError("Could not create new RTCPeerConnection") } - + self.peerConnection = peerConnection super.init() - + self.addInitialTransceivers() self.createMediaSenders() self.peerConnection.delegate = self } - + // MARK: Signaling func offer(completion: @escaping (_ sdp: RTCSessionDescription) -> Void) { // 1. Create the offer let constrains = RTCMediaConstraints(mandatoryConstraints: self.mediaConstraints, optionalConstraints: nil) - + self.peerConnection.offer(for: constrains) { (sdp, error) in guard let sdp = sdp else { Logger.shared.debug("Error creating offer: \(String(describing: error))") return } - + // 2. Set the local description to trigger ICE gathering self.peerConnection.setLocalDescription(sdp) { (error) in if let error = error { Logger.shared.debug("Error setting local description: \(error)") return } - + // Now ICE gathering will start, we need to wait for it to complete self.waitForIceGathering(completion: { // Manipulating so we can choose the codec @@ -102,22 +118,23 @@ final class SmallWebRTCConnection: NSObject { } } } - + private func waitForIceGathering(completion: @escaping () -> Void) { // Wait until ICE gathering is complete - DispatchQueue.global().async { - while self.peerConnection.iceGatheringState != .complete { - // Sleep to avoid blocking the main thread - Thread.sleep(forTimeInterval: 0.1) - } - - // Once gathering is complete, proceed with the callback - DispatchQueue.main.async { - completion() + DispatchQueue.global() + .async { + while self.peerConnection.iceGatheringState != .complete { + // Sleep to avoid blocking the main thread + Thread.sleep(forTimeInterval: 0.1) + } + + // Once gathering is complete, proceed with the callback + DispatchQueue.main.async { + completion() + } } - } } - + func offer() async throws -> RTCSessionDescription { return try await withCheckedThrowingContinuation { continuation in self.offer { sdp in @@ -125,21 +142,26 @@ final class SmallWebRTCConnection: NSObject { } } } - - func answer(completion: @escaping (_ sdp: RTCSessionDescription) -> Void) { - let constrains = RTCMediaConstraints(mandatoryConstraints: self.mediaConstraints, - optionalConstraints: nil) - self.peerConnection.answer(for: constrains) { (sdp, error) in + + func answer(completion: @escaping (_ sdp: RTCSessionDescription) -> Void) { + let constrains = RTCMediaConstraints( + mandatoryConstraints: self.mediaConstraints, + optionalConstraints: nil + ) + self.peerConnection.answer(for: constrains) { (sdp, _) in guard let sdp = sdp else { return } - - self.peerConnection.setLocalDescription(sdp, completionHandler: { (error) in - completion(sdp) - }) + + self.peerConnection.setLocalDescription( + sdp, + completionHandler: { (_) in + completion(sdp) + } + ) } } - + func answer() async throws -> RTCSessionDescription { return try await withCheckedThrowingContinuation { continuation in self.answer { sdp 
in @@ -147,31 +169,31 @@ final class SmallWebRTCConnection: NSObject { } } } - - func set(remoteSdp: RTCSessionDescription, completion: @escaping (Error?) -> ()) { + + func set(remoteSdp: RTCSessionDescription, completion: @escaping (Error?) -> Void) { self.peerConnection.setRemoteDescription(remoteSdp, completionHandler: completion) } - - func set(remoteCandidate: RTCIceCandidate, completion: @escaping (Error?) -> ()) { + + func set(remoteCandidate: RTCIceCandidate, completion: @escaping (Error?) -> Void) { self.peerConnection.add(remoteCandidate, completionHandler: completion) } - + func getLocalAudioTrack() -> RTCAudioTrack? { return self.localAudioTrack } - + func getRemoteAudioTrack() -> RTCAudioTrack? { return self.remoteAudioTrack } - + func getLocalVideoTrack() -> RTCVideoTrack? { return self.localVideoTrack } - + func getRemoteVideoTrack() -> RTCVideoTrack? { return self.remoteVideoTrack } - + // MARK: Media func stopLocalVideoCapturer() { guard let capturer = self.videoCapturer as? RTCCameraVideoCapturer else { @@ -179,14 +201,14 @@ final class SmallWebRTCConnection: NSObject { } capturer.stopCapture() } - + func startOrSwitchLocalVideoCapturer(deviceID: String? = nil) { guard let capturer = self.videoCapturer as? RTCCameraVideoCapturer else { return } - + let captureDevices = RTCCameraVideoCapturer.captureDevices() - + // Select device: use provided deviceID or default to front camera let selectedDevice: AVCaptureDevice? if let deviceID = deviceID { @@ -202,37 +224,43 @@ final class SmallWebRTCConnection: NSObject { return } } - + guard let device = selectedDevice else { return } - + // Choose highest resolution format - guard let format = RTCCameraVideoCapturer.supportedFormats(for: device) - .sorted(by: { f1, f2 in - let width1 = CMVideoFormatDescriptionGetDimensions(f1.formatDescription).width - let width2 = CMVideoFormatDescriptionGetDimensions(f2.formatDescription).width - return width1 < width2 - }).last, - - // Choose highest fps - let fps = format.videoSupportedFrameRateRanges - .sorted(by: { $0.maxFrameRate < $1.maxFrameRate }).last else { + guard + let format = RTCCameraVideoCapturer.supportedFormats(for: device) + .sorted(by: { f1, f2 in + let width1 = CMVideoFormatDescriptionGetDimensions(f1.formatDescription).width + let width2 = CMVideoFormatDescriptionGetDimensions(f2.formatDescription).width + return width1 < width2 + }) + .last, + + // Choose highest fps + let fps = format.videoSupportedFrameRateRanges + .sorted(by: { $0.maxFrameRate < $1.maxFrameRate }).last + else { return } - + Logger.shared.info("Starting capture on: \(device.localizedName) at \(fps.maxFrameRate) FPS") - + capturer.startCapture(with: device, format: format, fps: Int(fps.maxFrameRate)) } - + func getCurrentCamera() -> Device? { guard let capturer = self.videoCapturer as? RTCCameraVideoCapturer else { return nil } - - guard let currentDevice = capturer.captureSession.inputs.compactMap({ ($0 as? AVCaptureDeviceInput)?.device }).first else { + + guard + let currentDevice = capturer.captureSession.inputs.compactMap({ ($0 as? 
AVCaptureDeviceInput)?.device }) + .first + else { return nil } - + return Device( deviceID: currentDevice.uniqueID, groupID: "", @@ -240,7 +268,7 @@ final class SmallWebRTCConnection: NSObject { label: currentDevice.localizedName ) } - + private func addInitialTransceivers() { // Adding an audio transceiver with sendrecv direction let transceiverInit = RTCRtpTransceiverInit() @@ -249,56 +277,72 @@ final class SmallWebRTCConnection: NSObject { // Adding a video transceiver with sendrecv direction self.peerConnection.addTransceiver(of: .video, init: transceiverInit) } - + private func getAudioTransceiver() -> RTCRtpTransceiver? { // Transceivers are created in order, so the first one should be audio return self.peerConnection.transceivers.first } - + private func getVideoTransceiver() -> RTCRtpTransceiver? { // The second transceiver should be video return self.peerConnection.transceivers.dropFirst().first } - + private func createMediaSenders() { // Audio - if (self.enableMic) { - let audioTrack = self.createAudioTrack() - self.getAudioTransceiver()?.sender.track = audioTrack - self.localAudioTrack = audioTrack + if self.enableMic { + self.createLocalAudioTrack() } - + // Video - if (self.enableCam) { - let videoTrack = self.createVideoTrack() - self.getVideoTransceiver()?.sender.track = videoTrack - self.localVideoTrack = videoTrack - self.remoteVideoTrack = self.getVideoTransceiver()?.receiver.track as? RTCVideoTrack + if self.enableCam { + self.createLocalVideoTrack() } - + // Data if let dataChannel = self.createDataChannel(label: "rtvi-events") { dataChannel.delegate = self self.signallingDataChannel = dataChannel } } - + + private func createLocalAudioTrack() { + if self.localAudioTrack != nil { + return + } + let audioTrack = self.createAudioTrack() + self.getAudioTransceiver()?.sender.track = audioTrack + self.localAudioTrack = audioTrack + self.delegate?.onTracksUpdated() + } + + private func createLocalVideoTrack() { + if self.localVideoTrack != nil { + return + } + let videoTrack = self.createVideoTrack() + self.getVideoTransceiver()?.sender.track = videoTrack + self.localVideoTrack = videoTrack + self.remoteVideoTrack = self.getVideoTransceiver()?.receiver.track as? RTCVideoTrack + self.delegate?.onTracksUpdated() + } + private func createAudioTrack() -> RTCAudioTrack { let audioConstrains = RTCMediaConstraints(mandatoryConstraints: nil, optionalConstraints: nil) let audioSource = SmallWebRTCConnection.factory.audioSource(with: audioConstrains) let audioTrack = SmallWebRTCConnection.factory.audioTrack(with: audioSource, trackId: UUID().uuidString) return audioTrack } - + private func createVideoTrack() -> RTCVideoTrack { let videoSource = SmallWebRTCConnection.factory.videoSource() self.videoCapturer = RTCCameraVideoCapturer(delegate: videoSource) let videoTrack = SmallWebRTCConnection.factory.videoTrack(with: videoSource, trackId: UUID().uuidString) return videoTrack } - + // MARK: Data Channels - private func createDataChannel(label:String) -> RTCDataChannel? { + private func createDataChannel(label: String) -> RTCDataChannel? 
{ let config = RTCDataChannelConfiguration() guard let dataChannel = self.peerConnection.dataChannel(forLabel: label, configuration: config) else { Logger.shared.debug("Warning: Couldn't create data channel.") @@ -306,18 +350,18 @@ final class SmallWebRTCConnection: NSObject { } return dataChannel } - - func sendMessage( message: Encodable) throws{ - let jsonData = try JSONEncoder().encode(message); + + func sendMessage(message: Encodable) throws { + let jsonData = try JSONEncoder().encode(message) Logger.shared.debug("Sending message: \(String(data: jsonData, encoding: .utf8) ?? "")") let buffer = RTCDataBuffer(data: jsonData, isBinary: true) self.signallingDataChannel?.sendData(buffer) } - + func disconnect() { self.signallingDataChannel?.close() self.peerConnection.close() - + self.signallingDataChannel = nil self.localAudioTrack = nil self.remoteAudioTrack = nil @@ -327,11 +371,11 @@ final class SmallWebRTCConnection: NSObject { } extension SmallWebRTCConnection: RTCPeerConnectionDelegate { - + func peerConnection(_ peerConnection: RTCPeerConnection, didChange stateChanged: RTCSignalingState) { Logger.shared.debug("peerConnection new signaling state: \(stateChanged)") } - + func peerConnection(_ peerConnection: RTCPeerConnection, didAdd stream: RTCMediaStream) { Logger.shared.debug("peerConnection did add stream") if !stream.audioTracks.isEmpty { @@ -339,43 +383,44 @@ extension SmallWebRTCConnection: RTCPeerConnectionDelegate { self.delegate?.onTracksUpdated() } } - + func peerConnection(_ peerConnection: RTCPeerConnection, didRemove stream: RTCMediaStream) { Logger.shared.debug("peerConnection did remove stream") - if !stream.audioTracks.isEmpty && self.remoteAudioTrack != nil && self.remoteAudioTrack?.trackId == stream.audioTracks[0].trackId { + if !stream.audioTracks.isEmpty && self.remoteAudioTrack != nil + && self.remoteAudioTrack?.trackId == stream.audioTracks[0].trackId { self.remoteAudioTrack = nil self.delegate?.onTracksUpdated() } } - + func peerConnectionShouldNegotiate(_ peerConnection: RTCPeerConnection) { Logger.shared.debug("peerConnection should negotiate") } - + func peerConnection(_ peerConnection: RTCPeerConnection, didChange newState: RTCIceConnectionState) { Logger.shared.debug("peerConnection new connection state: \(newState)") self.delegate?.onConnectionStateChanged(state: newState) } - + func peerConnection(_ peerConnection: RTCPeerConnection, didChange newState: RTCIceGatheringState) { Logger.shared.debug("peerConnection new gathering state: \(newState)") if newState == .complete { self.iceGatheringCompleted = true } } - + func peerConnection(_ peerConnection: RTCPeerConnection, didGenerate candidate: RTCIceCandidate) { Logger.shared.debug("peerConnection did discover new ice candidate \(candidate.sdp)") } - + func peerConnection(_ peerConnection: RTCPeerConnection, didRemove candidates: [RTCIceCandidate]) { Logger.shared.debug("peerConnection did remove candidate(s)") } - + func peerConnection(_ peerConnection: RTCPeerConnection, didOpen dataChannel: RTCDataChannel) { Logger.shared.debug("peerConnection did receive new data channel") } - + } extension SmallWebRTCConnection { private func setTrackEnabled(_ type: T.Type, isEnabled: Bool) { @@ -385,24 +430,27 @@ extension SmallWebRTCConnection { } } -// MARK:- Audio and Video control +// MARK: - Audio and Video control extension SmallWebRTCConnection { func muteAudio() { self.setAudioEnabled(false) } - + func unmuteAudio() { + if self.localAudioTrack == nil { + self.createLocalAudioTrack() + } 
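+        // Lazy creation: the audio transceiver is added up front in
+        // addInitialTransceivers(), but the sender track is only attached on first
+        // unmute, mirroring the showVideo()/createLocalVideoTrack() pattern below.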
self.setAudioEnabled(true) } - + func isAudioEnabled() -> Bool { return self.localAudioTrack?.isEnabled ?? true } - + private func setAudioEnabled(_ isEnabled: Bool) { setTrackEnabled(RTCAudioTrack.self, isEnabled: isEnabled) } - + func hideVideo() { guard self.localVideoTrack?.isEnabled == true else { // nothing to do here @@ -410,30 +458,39 @@ extension SmallWebRTCConnection { } self.setVideoEnabled(false) } - + func showVideo() { + if self.localVideoTrack == nil { + self.createLocalVideoTrack() + } guard self.localVideoTrack?.isEnabled == false else { // nothing to do here return } self.setVideoEnabled(true) } - + func isVideoEnabled() -> Bool { return self.localVideoTrack?.isEnabled ?? false } - + private func setVideoEnabled(_ isEnabled: Bool) { setTrackEnabled(RTCVideoTrack.self, isEnabled: isEnabled) } } extension SmallWebRTCConnection: RTCDataChannelDelegate { - + func dataChannelDidChangeState(_ dataChannel: RTCDataChannel) { Logger.shared.debug("dataChannel did change state: \(dataChannel.readyState)") + if dataChannel.readyState == .open { + // Call the handler when the channel opens + dataChannelOpenHandler?() + // Clear the handler after it's called + dataChannelOpenHandler = nil + } } - + func dataChannel(_ dataChannel: RTCDataChannel, didReceiveMessageWith buffer: RTCDataBuffer) { do { let receivedValue = try JSONDecoder().decode(Value.self, from: buffer.data) @@ -442,56 +499,71 @@ extension SmallWebRTCConnection: RTCDataChannelDelegate { Logger.shared.error("Error decoding JSON into Value: \(error.localizedDescription)") } } - + + public func waitForDataChannelOpen() async throws { + return try await withCheckedThrowingContinuation { continuation in + if self.signallingDataChannel?.readyState == .open { + continuation.resume() + return + } + + // Set up an observer or callback to be notified when data channel opens + dataChannelOpenHandler = { + continuation.resume() + } + } + } + } // handle codecs manipulation extension SmallWebRTCConnection { - + func filterCodec(kind: String, codec: String, in sdp: String) -> String { var allowedPayloadTypes: [String] = [] let lines = sdp.components(separatedBy: "\n") var isMediaSection = false var modifiedLines: [String] = [] - + let codecPattern = "a=rtpmap:(\\d+) \(NSRegularExpression.escapedPattern(for: codec))" let rtxPattern = "a=fmtp:(\\d+) apt=(\\d+)" let mediaPattern = "m=\(kind) \\d+ [A-Z/]+(?: (\\d+))*" - + guard let codecRegex = try? NSRegularExpression(pattern: codecPattern), - let rtxRegex = try? NSRegularExpression(pattern: rtxPattern), - let mediaRegex = try? NSRegularExpression(pattern: mediaPattern) else { + let rtxRegex = try? NSRegularExpression(pattern: rtxPattern), + let mediaRegex = try? 
NSRegularExpression(pattern: mediaPattern) + else { return sdp } - + for line in lines { if line.starts(with: "m=\(kind) ") { isMediaSection = true } else if line.starts(with: "m=") { isMediaSection = false } - + if isMediaSection { if let match = codecRegex.firstMatch(in: line, range: NSRange(line.startIndex..., in: line)), - let payloadRange = Range(match.range(at: 1), in: line) { + let payloadRange = Range(match.range(at: 1), in: line) { allowedPayloadTypes.append(String(line[payloadRange])) } - + if let match = rtxRegex.firstMatch(in: line, range: NSRange(line.startIndex..., in: line)), - let payloadTypeRange = Range(match.range(at: 1), in: line), - let aptRange = Range(match.range(at: 2), in: line), - allowedPayloadTypes.contains(String(line[aptRange])) { + let payloadTypeRange = Range(match.range(at: 1), in: line), + let aptRange = Range(match.range(at: 2), in: line), + allowedPayloadTypes.contains(String(line[aptRange])) { allowedPayloadTypes.append(String(line[payloadTypeRange])) } } } - + isMediaSection = false for line in lines { if line.starts(with: "m=\(kind) ") { isMediaSection = true if let match = mediaRegex.firstMatch(in: line, range: NSRange(line.startIndex..., in: line)), - let mediaLineRange = Range(match.range(at: 0), in: line) { + let mediaLineRange = Range(match.range(at: 0), in: line) { let mediaLine = String(line[mediaLineRange]) let newMediaLine = mediaLine + " " + allowedPayloadTypes.joined(separator: " ") modifiedLines.append(newMediaLine) @@ -500,20 +572,20 @@ extension SmallWebRTCConnection { } else if line.starts(with: "m=") { isMediaSection = false } - + if isMediaSection { let skipPatterns = ["a=rtpmap:", "a=fmtp:", "a=rtcp-fb:"] if skipPatterns.contains(where: { line.starts(with: $0) }), - let payloadType = line.split(separator: ":").last?.split(separator: " ").first, - !allowedPayloadTypes.contains(String(payloadType)) { + let payloadType = line.split(separator: ":").last?.split(separator: " ").first, + !allowedPayloadTypes.contains(String(payloadType)) { continue } } - + modifiedLines.append(line) } - + return modifiedLines.joined(separator: "\n") } - + } diff --git a/Sources/PipecatClientIOSSmallWebrtc/SmallWebRTCTransport.swift b/Sources/PipecatClientIOSSmallWebrtc/SmallWebRTCTransport.swift index 6639824..b1a79c2 100644 --- a/Sources/PipecatClientIOSSmallWebrtc/SmallWebRTCTransport.swift +++ b/Sources/PipecatClientIOSSmallWebrtc/SmallWebRTCTransport.swift @@ -5,16 +5,14 @@ import WebRTC /// An RTVI transport to connect with the SmallWebRTCTransport backend. public class SmallWebRTCTransport: Transport { - - public static let SERVICE_NAME = "small-webrtc-transport"; - private var iceServers: [String] = [] - private let options: RTVIClientOptions + private var options: PipecatClientOptions? + private var smallWebRTConnectionParams: SmallWebRTCTransportConnectionParams? private var _state: TransportState = .disconnected - private var smallWebRTCConnection: SmallWebRTCConnection? = nil + private var smallWebRTCConnection: SmallWebRTCConnection? private let audioManager = AudioManager() private let videoManager = VideoManager() - private var connectedBotParticipant = Participant( + var connectedBotParticipant = Participant( id: ParticipantId(id: UUID().uuidString), name: "Small WebRTC Bot", local: false @@ -23,119 +21,140 @@ public class SmallWebRTCTransport: Transport { private var _selectedMic: MediaDeviceInfo? private var pc_id: String? private var preferredCamId: PipecatClientIOS.MediaDeviceId? - + private var _tracks: Tracks? 
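// handleTrackChanges(previous:current:) and handleInitialTracks(tracks:), called from
// handleTracksUpdated() below, live in the new util/SmallWebRTCTransportTrackExtension.swift
// (per the diffstat) and are not shown in this section. A minimal sketch of the per-track
// diff they imply, assuming MediaStreamTrack is Equatable; the function name and the
// notify closures are illustrative stand-ins, not code from this patch:
private func diffTrack(
    previous: MediaStreamTrack?,
    current: MediaStreamTrack?,
    onStarted: (MediaStreamTrack) -> Void,
    onStopped: (MediaStreamTrack) -> Void
) {
    guard previous != current else { return }
    // A replaced track counts as the old one stopping and the new one starting.
    if let previous { onStopped(previous) }
    if let current { onStarted(current) }
}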
+ // MARK: - Public - + /// Voice client delegate (used directly by user's code) - public weak var delegate: PipecatClientIOS.RTVIClientDelegate? - + public weak var delegate: PipecatClientIOS.PipecatClientDelegate? + /// RTVI inbound message handler (for sending RTVI-style messages to voice client code to handle) public var onMessage: ((PipecatClientIOS.RTVIMessageInbound) -> Void)? - - public required convenience init(options: PipecatClientIOS.RTVIClientOptions) { - self.init(options: options, iceServers: nil) + + public required convenience init() { + self.init(iceServers: nil) } - - public init(options: PipecatClientIOS.RTVIClientOptions, iceServers: [String]?) { - self.options = options + + public init(iceServers: [String]?) { self.audioManager.delegate = self if iceServers != nil { self.iceServers = iceServers! } } - + + public func initialize(options: PipecatClientOptions) { + self.options = options + } + + func handleTracksUpdated() { + guard let currentTracks = self.tracks() else { + // Nothing to do here, no tracks available yet + return + } + + if let previousTracks = self._tracks { + self.handleTrackChanges(previous: previousTracks, current: currentTracks) + } else { + // First time tracks are available, notify all starting tracks + self.handleInitialTracks(tracks: currentTracks) + } + + self._tracks = currentTracks + } + public func initDevices() async throws { - if (self.devicesInitialized) { + if self.devicesInitialized { // There is nothing to do in this case return } - + self.setState(state: .initializing) - + // start managing audio device configuration self.audioManager.startManagingIfNecessary() - + // initialize devices state and report initial available & selected devices self._selectedMic = self.getSelectedMic() - self.delegate?.onAvailableMicsUpdated(mics: self.getAllMics()); + self.delegate?.onAvailableMicsUpdated(mics: self.getAllMics()) self.delegate?.onMicUpdated(mic: self._selectedMic) - + self.delegate?.onAvailableCamsUpdated(cams: self.getAllCams()) + self.delegate?.onCamUpdated(cam: self.selectedCam()) + self.setState(state: .initialized) self.devicesInitialized = true } - + public func release() { // stop managing audio device configuration self.audioManager.stopManaging() self._selectedMic = nil VideoTrackRegistry.clearRegistry() } - - private func sendOffer(connectUrl: String, offer: SmallWebRTCSessionDescription) async throws -> SmallWebRTCSessionDescription { - guard let url = URL(string: connectUrl) else { - throw InvalidAuthBundleError() - } - + + private func sendOffer(connectUrl: URL, offer: SmallWebRTCSessionDescription) async throws + -> SmallWebRTCSessionDescription { Logger.shared.debug("connectUrl, \(connectUrl)") - - var request = URLRequest(url: url) + + var request = URLRequest(url: connectUrl) request.httpMethod = "POST" - + // headers request.setValue("application/json", forHTTPHeaderField: "Content-Type") - + do { - /*var customBundle:Value = Value.object([ - "sdp": Value.string(sdp.sdp), - "type": Value.number(Double(sdp.type.rawValue)) - ])*/ request.httpBody = try JSONEncoder().encode(offer) - + Logger.shared.debug("Will send offer") - + let (data, response) = try await URLSession.shared.data(for: request) - - guard let httpResponse = response as? HTTPURLResponse, ( httpResponse.statusCode >= 200 && httpResponse.statusCode <= 299 ) else { + + guard let httpResponse = response as? HTTPURLResponse, + httpResponse.statusCode >= 200 && httpResponse.statusCode <= 299 + else { let errorMessage = String(data: data, encoding: .utf8) ?? 
"Unknown error" let message = "Failed while authenticating: \(errorMessage)" throw HttpError(message: message) } - + let answer = try JSONDecoder().decode(SmallWebRTCSessionDescription.self, from: data) - + Logger.shared.debug("Received answer") - + return answer } catch { throw HttpError(message: "Failed while trying to receive answer.", underlyingError: error) } } - + private func negotiate() async throws { // start connecting guard let webrtcClient = self.smallWebRTCConnection else { Logger.shared.warn("Unable to negotiate, no peer connection available.") return } + guard let smallWebRTConnectionParams = self.smallWebRTConnectionParams else { + Logger.shared.warn("Unable to negotiate, no connection params available.") + return + } do { let sdp = try await webrtcClient.offer() - - guard let connectUrl = self.options.params.config.serverUrl else { - Logger.shared.error("Missing Base URL") - return - } - - var offer = SmallWebRTCSessionDescription(from:sdp) + + let connectUrl = smallWebRTConnectionParams.webrtcRequestParams.endpoint + + var offer = SmallWebRTCSessionDescription(from: sdp) offer.pc_id = self.pc_id - + let answer = try await self.sendOffer(connectUrl: connectUrl, offer: offer) self.pc_id = answer.pc_id - - webrtcClient.set(remoteSdp: answer.rtcSessionDescription, completion: { error in - if let error = error { - Logger.shared.error("Failed to set remote SDP: \(error.localizedDescription)") + + webrtcClient.set( + remoteSdp: answer.rtcSessionDescription, + completion: { error in + if let error = error { + Logger.shared.error("Failed to set remote SDP: \(error.localizedDescription)") + } } - }) + ) } catch { Logger.shared.error("Received error while trying to connect \(error)") self.smallWebRTCConnection = nil @@ -143,68 +162,101 @@ public class SmallWebRTCTransport: Transport { throw error } } - - public func connect(authBundle: PipecatClientIOS.AuthBundle?) async throws { + + public func connect(transportParams: TransportConnectionParams?) async throws { self.setState(state: .connecting) - - let webrtcClient = SmallWebRTCConnection(iceServers: self.iceServers, enableCam: self.options.enableCam, enableMic: self.options.enableMic) + + guard let smallWebRTConnectionParams = transportParams as? SmallWebRTCTransportConnectionParams else { + throw InvalidTransportParamsError() + } + self.smallWebRTConnectionParams = smallWebRTConnectionParams + + let webrtcClient = SmallWebRTCConnection( + iceServers: self.iceServers, + enableCam: self.options?.enableCam ?? false, + enableMic: self.options?.enableMic ?? 
true + ) webrtcClient.delegate = self webrtcClient.startOrSwitchLocalVideoCapturer(deviceID: self.preferredCamId?.id) self.smallWebRTCConnection = webrtcClient - + try await self.negotiate() - + + // Wait for the data channel to be open before setting state to connected + try await webrtcClient.waitForDataChannelOpen() + self.setState(state: .connected) + + try self.sendMessage(message: RTVIMessageOutbound.clientReady()) + self.syncTrackStatus() } - + + private func syncTrackStatus() { + guard let smallWebRTCConnection = self.smallWebRTCConnection else { return } + self.sendSignallingMessage( + message: TrackStatusMessage.init( + receiverIndex: SmallWebRTCTransceiverIndex.audio.rawValue, + enabled: smallWebRTCConnection.isAudioEnabled() + ) + ) + self.sendSignallingMessage( + message: TrackStatusMessage.init( + receiverIndex: SmallWebRTCTransceiverIndex.video.rawValue, + enabled: smallWebRTCConnection.isVideoEnabled() + ) + ) + } + public func disconnect() async throws { // stop websocket connection self.smallWebRTCConnection?.disconnect() self.smallWebRTCConnection = nil - - self.delegate?.onTracksUpdated(tracks: self.tracks()!) - + self.handleTracksUpdated() self.setState(state: .disconnected) } - + public func getAllMics() -> [PipecatClientIOS.MediaDeviceInfo] { audioManager.availableDevices.map { $0.toRtvi() } } - + public func getAllCams() -> [PipecatClientIOS.MediaDeviceInfo] { videoManager.availableDevices.map { $0.toRtvi() } } - + public func updateMic(micId: PipecatClientIOS.MediaDeviceId) async throws { audioManager.preferredAudioDevice = .init(deviceID: micId.id) - + // Refresh what we should report as the selected mic refreshSelectedMicIfNeeded() } - + public func updateCam(camId: PipecatClientIOS.MediaDeviceId) async throws { self.preferredCamId = camId self.smallWebRTCConnection?.startOrSwitchLocalVideoCapturer(deviceID: camId.id) } - + /// What we report as the selected mic. public func selectedMic() -> PipecatClientIOS.MediaDeviceInfo? { _selectedMic } - + public func selectedCam() -> PipecatClientIOS.MediaDeviceInfo? { return self.smallWebRTCConnection?.getCurrentCamera()?.toRtvi() } - + public func enableMic(enable: Bool) async throws { if enable { self.smallWebRTCConnection?.unmuteAudio() } else { self.smallWebRTCConnection?.muteAudio() } + self.sendSignallingMessage( + message: TrackStatusMessage.init(receiverIndex: SmallWebRTCTransceiverIndex.audio.rawValue, enabled: enable) + ) } - + public func enableCam(enable: Bool) async throws { + Logger.shared.debug("Requested to enable cam: \(enable)") if enable { self.smallWebRTCConnection?.showVideo() self.smallWebRTCConnection?.startOrSwitchLocalVideoCapturer(deviceID: self.preferredCamId?.id) @@ -212,16 +264,19 @@ public class SmallWebRTCTransport: Transport { self.smallWebRTCConnection?.hideVideo() self.smallWebRTCConnection?.stopLocalVideoCapturer() } + self.sendSignallingMessage( + message: TrackStatusMessage.init(receiverIndex: SmallWebRTCTransceiverIndex.video.rawValue, enabled: enable) + ) } - + public func isCamEnabled() -> Bool { return self.smallWebRTCConnection?.isVideoEnabled() ?? false } - + public func isMicEnabled() -> Bool { return self.smallWebRTCConnection?.isAudioEnabled() ?? 
true } - + public func sendMessage(message: PipecatClientIOS.RTVIMessageOutbound) throws { do { try self.smallWebRTCConnection?.sendMessage(message: message) @@ -229,81 +284,85 @@ public class SmallWebRTCTransport: Transport { Logger.shared.error("Error sending message: \(error.localizedDescription)") } } - + + private func sendSignallingMessage(message: OutboundSignallingMessageProtocol) { + let signallingMessage = OutboundSignallingMessage.init(message: message) + do { + try self.smallWebRTCConnection?.sendMessage(message: signallingMessage) + } catch { + Logger.shared.error("Error sending signalling message: \(error.localizedDescription)") + } + } + public func state() -> PipecatClientIOS.TransportState { self._state } - + public func setState(state: PipecatClientIOS.TransportState) { let previousState = self._state - + self._state = state - + // Fire delegate methods as needed if state != previousState { self.delegate?.onTransportStateChanged(state: self._state) - + if state == .connected { self.delegate?.onConnected() // New bot participant id each time we connect - connectedBotParticipant = Participant( + self.connectedBotParticipant = Participant( id: ParticipantId(id: UUID().uuidString), name: connectedBotParticipant.name, local: connectedBotParticipant.local ) self.delegate?.onParticipantJoined(participant: connectedBotParticipant) self.delegate?.onBotConnected(participant: connectedBotParticipant) - } - else if state == .disconnected { + } else if state == .disconnected { self.delegate?.onParticipantLeft(participant: connectedBotParticipant) self.delegate?.onBotDisconnected(participant: connectedBotParticipant) self.delegate?.onDisconnected() } } } - - public func isConnected() -> Bool { - return [.connected, .ready].contains(self._state) - } - + public func tracks() -> PipecatClientIOS.Tracks? { // removing any track since we are going to store it again VideoTrackRegistry.clearRegistry() - + let localVideoTrack = self.smallWebRTCConnection?.getLocalVideoTrack() // Registering the track so we can retrieve it later inside the VoiceClientVideoView if let localVideoTrack = localVideoTrack { - VideoTrackRegistry.registerTrack(originalTrack: localVideoTrack, mediaTrackId: localVideoTrack.toRtvi()) + VideoTrackRegistry.registerTrack(originalTrack: localVideoTrack, mediaTrackId: localVideoTrack.toRtvi().id) } - + let botVideoTrack = self.smallWebRTCConnection?.getRemoteVideoTrack() // Registering the track so we can retrieve it later inside the VoiceClientVideoView if let botVideoTrack = botVideoTrack { - VideoTrackRegistry.registerTrack(originalTrack: botVideoTrack, mediaTrackId: botVideoTrack.toRtvi()) + VideoTrackRegistry.registerTrack(originalTrack: botVideoTrack, mediaTrackId: botVideoTrack.toRtvi().id) } - + return Tracks( local: ParticipantTracks( audio: self.smallWebRTCConnection?.getLocalAudioTrack()?.toRtvi(), - video: localVideoTrack?.toRtvi() + video: localVideoTrack?.toRtvi(), + screenAudio: nil, + screenVideo: nil ), bot: ParticipantTracks( audio: self.smallWebRTCConnection?.getRemoteAudioTrack()?.toRtvi(), - video: botVideoTrack?.toRtvi() + video: botVideoTrack?.toRtvi(), + screenAudio: nil, + screenVideo: nil ) ) } - - public func expiry() -> Int? { - return nil - } - + public func setIceServers(iceServers: [String]) { self.iceServers = iceServers } - + // MARK: - Private - + /// Refresh what we should report as the selected mic. 
private func refreshSelectedMicIfNeeded() { let newSelectedMic = getSelectedMic() @@ -312,30 +371,32 @@ public class SmallWebRTCTransport: Transport { delegate?.onMicUpdated(mic: _selectedMic) } } - + /// Selected mic is a value derived from the preferredAudioDevice and the set of available devices, so it may change whenever either of those change. private func getSelectedMic() -> PipecatClientIOS.MediaDeviceInfo? { - audioManager.availableDevices.first { $0.deviceID == audioManager.preferredAudioDeviceIfAvailable?.deviceID }?.toRtvi() + audioManager.availableDevices.first { $0.deviceID == audioManager.preferredAudioDeviceIfAvailable?.deviceID }? + .toRtvi() } } // MARK: - SmallWebRTCConnectionDelegate extension SmallWebRTCTransport: SmallWebRTCConnectionDelegate { - + func onConnectionStateChanged(state: RTCIceConnectionState) { - if ( state == .failed || state == .closed ) && ( self._state != .disconnected && self._state != .disconnecting ){ + if (state == .failed || state == .closed) && (self._state != .disconnected && self._state != .disconnecting) { Task { try await self.disconnect() } } } - + func onMsgReceived(msg: PipecatClientIOS.Value) { Task { let dict = msg.asObject - if (dict["type"] != nil && dict["type"]!!.asString == SIGNALLING_TYPE) { - if let message = SignallingMessage(rawValue: dict["message"]!!.asString) { + if dict["type"] != nil && dict["type"]!!.asString == SIGNALLING_TYPE { + let jsonData = Data(dict["message"]!!.asString.utf8) + if let message = try? JSONDecoder().decode(InboundSignallingMessage.self, from: jsonData) { await self.handleSignallingMessage(message) } } else { @@ -343,37 +404,41 @@ extension SmallWebRTCTransport: SmallWebRTCConnectionDelegate { } } } - + func onTracksUpdated() { - self.delegate?.onTracksUpdated(tracks: self.tracks()!) 
+ self.handleTracksUpdated() } - + private func handleMessage(_ msg: Value) { let dict = msg.asObject if let typeValue = dict["label"] { if typeValue?.asString == "rtvi-ai" { Logger.shared.debug("Received RTVI message: \(msg)") - self.onMessage?(.init( - type: dict["type"]??.asString, - data: dict["data"]??.asString, - id: dict["id"]??.asString - )) + self.onMessage?( + .init( + type: dict["type"]??.asString, + data: dict["data"]??.asString, + id: dict["id"]??.asString + ) + ) } } } - - private func handleSignallingMessage(_ msg: SignallingMessage) async { + + private func handleSignallingMessage(_ msg: InboundSignallingMessage) async { Logger.shared.info("Handling signalling message: \(msg)") do { switch msg { case .renegotiate: try await self.negotiate() + case .peerLeft: + try await self.disconnect() } } catch { Logger.shared.error("Error while handling signalling message: \(error.localizedDescription)") } } - + } // MARK: - AudioManagerDelegate @@ -382,11 +447,11 @@ extension SmallWebRTCTransport: AudioManagerDelegate { func audioManagerDidChangeAvailableDevices(_ audioManager: AudioManager) { // Report available mics changed delegate?.onAvailableMicsUpdated(mics: getAllMics()) - + // Refresh what we should report as the selected mic refreshSelectedMicIfNeeded() } - + func audioManagerDidChangeAudioDevice(_ audioManager: AudioManager) { // nothing to do here } diff --git a/Sources/PipecatClientIOSSmallWebrtc/extensions/NSLayoutConstraint+Extensions.swift b/Sources/PipecatClientIOSSmallWebrtc/extensions/NSLayoutConstraint+Extensions.swift index eecff35..9b34e23 100644 --- a/Sources/PipecatClientIOSSmallWebrtc/extensions/NSLayoutConstraint+Extensions.swift +++ b/Sources/PipecatClientIOSSmallWebrtc/extensions/NSLayoutConstraint+Extensions.swift @@ -29,7 +29,7 @@ extension NSLayoutConstraint { view.widthAnchor.constraint(lessThanOrEqualTo: superview.widthAnchor), view.heightAnchor.constraint(lessThanOrEqualTo: superview.heightAnchor), view.widthAnchor.constraint(equalTo: superview.widthAnchor).priority(.defaultHigh), - view.heightAnchor.constraint(equalTo: superview.heightAnchor).priority(.defaultHigh), + view.heightAnchor.constraint(equalTo: superview.heightAnchor).priority(.defaultHigh) ] } @@ -48,7 +48,7 @@ extension NSLayoutConstraint { view.widthAnchor.constraint(greaterThanOrEqualTo: superview.widthAnchor), view.heightAnchor.constraint(greaterThanOrEqualTo: superview.heightAnchor), view.widthAnchor.constraint(equalTo: superview.widthAnchor).priority(.defaultHigh), - view.heightAnchor.constraint(equalTo: superview.heightAnchor).priority(.defaultHigh), + view.heightAnchor.constraint(equalTo: superview.heightAnchor).priority(.defaultHigh) ] } } diff --git a/Sources/PipecatClientIOSSmallWebrtc/extensions/RTCAudioTrack.swift b/Sources/PipecatClientIOSSmallWebrtc/extensions/RTCAudioTrack.swift index 61ab324..f1169e9 100644 --- a/Sources/PipecatClientIOSSmallWebrtc/extensions/RTCAudioTrack.swift +++ b/Sources/PipecatClientIOSSmallWebrtc/extensions/RTCAudioTrack.swift @@ -2,7 +2,10 @@ import WebRTC import PipecatClientIOS extension RTCAudioTrack { - func toRtvi() -> MediaTrackId { - return MediaTrackId(id: trackId) + func toRtvi() -> PipecatClientIOS.MediaStreamTrack { + return MediaStreamTrack( + id: MediaTrackId(id: trackId), + kind: .audio + ) } } diff --git a/Sources/PipecatClientIOSSmallWebrtc/extensions/RTCVideoTrack.swift b/Sources/PipecatClientIOSSmallWebrtc/extensions/RTCVideoTrack.swift index 161f855..ce76fc0 100644 --- 
a/Sources/PipecatClientIOSSmallWebrtc/extensions/RTCVideoTrack.swift +++ b/Sources/PipecatClientIOSSmallWebrtc/extensions/RTCVideoTrack.swift @@ -2,7 +2,10 @@ import WebRTC import PipecatClientIOS extension RTCVideoTrack { - func toRtvi() -> MediaTrackId { - return MediaTrackId(id: trackId) + func toRtvi() -> PipecatClientIOS.MediaStreamTrack { + return MediaStreamTrack( + id: MediaTrackId(id: trackId), + kind: .video + ) } } diff --git a/Sources/PipecatClientIOSSmallWebrtc/extensions/RTVIExtensions.swift b/Sources/PipecatClientIOSSmallWebrtc/extensions/RTVIExtensions.swift deleted file mode 100644 index 1f5c6e1..0000000 --- a/Sources/PipecatClientIOSSmallWebrtc/extensions/RTVIExtensions.swift +++ /dev/null @@ -1,37 +0,0 @@ -import Foundation -import PipecatClientIOS - -extension [ServiceConfig] { - var serverUrl: String? { - let apiKeyOption = serviceConfig?.options.first { $0.name == "server_url" } - if case let .string(apiKey) = apiKeyOption?.value { - return apiKey - } - return nil - } - - var serviceConfig: ServiceConfig? { - first { $0.service == SmallWebRTCTransport.SERVICE_NAME } - } -} - -extension Value { - var asObject: [String: Value] { - if case .object(let dict) = self { - return dict - } - return [:] - } - - var asString: String { - if case .object(_) = self { - do { - let jsonData = try JSONEncoder().encode(self) - return String(data: jsonData, encoding: .utf8)! - } catch {} - } else if case .string(let stringValue) = self { - return stringValue - } - return "" - } -} diff --git a/Sources/PipecatClientIOSSmallWebrtc/types/Device.swift b/Sources/PipecatClientIOSSmallWebrtc/types/Device.swift index 32af2db..bf2c963 100644 --- a/Sources/PipecatClientIOSSmallWebrtc/types/Device.swift +++ b/Sources/PipecatClientIOSSmallWebrtc/types/Device.swift @@ -5,9 +5,9 @@ import Foundation public enum DeviceKind: RawRepresentable, CaseIterable, Equatable, Hashable { case videoInput case audio(PortKind) - + public typealias RawValue = String - + public var rawValue: RawValue { switch self { case .videoInput: @@ -18,11 +18,11 @@ public enum DeviceKind: RawRepresentable, CaseIterable, Equatable, Hashable { return "audiooutput" } } - + static public var allCases: [DeviceKind] { [.videoInput, .audio(.input), .audio(.output)] } - + public init?(rawValue: RawValue) { switch rawValue { case "videoinput": @@ -47,15 +47,15 @@ public enum AudioDeviceType: String, RawRepresentable { case speakerphone case wired case earpiece - + public var deviceID: String { self.rawValue } - + public init?(deviceID: String) { self.init(rawValue: deviceID) } - + @_spi(Testing) public init?(sessionPort: AVAudioSession.Port) { switch sessionPort { @@ -78,7 +78,7 @@ extension DeviceKind: Codable { var container = encoder.singleValueContainer() try container.encode(self.rawValue) } - + public init(from decoder: Decoder) throws { let container = try decoder.singleValueContainer() let stringValue = try container.decode(String.self) @@ -99,13 +99,13 @@ public struct Device: Equatable { /// Identifier for the represented device that is persistent across application launches. public let deviceID: String public let groupID: String - + /// Enumerated value specifying the device kind. public let kind: DeviceKind - + /// A label describing this device (e.g. "External USB Webcam"). 
public let label: String - + @_spi(Testing) public init( deviceID: String, diff --git a/Sources/PipecatClientIOSSmallWebrtc/types/SessionDescription.swift b/Sources/PipecatClientIOSSmallWebrtc/types/SessionDescription.swift index 987d990..fbb1c8d 100644 --- a/Sources/PipecatClientIOSSmallWebrtc/types/SessionDescription.swift +++ b/Sources/PipecatClientIOSSmallWebrtc/types/SessionDescription.swift @@ -4,11 +4,11 @@ import WebRTC /// This enum is a swift wrapper over `RTCSdpType` for easy encode and decode enum SdpType: String, Codable { case offer, prAnswer, answer, rollback - + var rtcSdpType: RTCSdpType { switch self { - case .offer: return .offer - case .answer: return .answer + case .offer: return .offer + case .answer: return .answer case .prAnswer: return .prAnswer case .rollback: return .rollback } @@ -17,26 +17,34 @@ enum SdpType: String, Codable { /// This struct is a swift wrapper over `RTCSessionDescription` for easy encode and decode struct SmallWebRTCSessionDescription: Codable { - + var sdp: String var pc_id: String? let type: SdpType // We are not handling this case in the iOS SDK yet. - let restart_pc: Bool = false - + var restart_pc: Bool = false + init(from rtcSessionDescription: RTCSessionDescription) { self.sdp = rtcSessionDescription.sdp - + switch rtcSessionDescription.type { - case .offer: self.type = .offer + case .offer: self.type = .offer case .prAnswer: self.type = .prAnswer - case .answer: self.type = .answer + case .answer: self.type = .answer case .rollback: self.type = .rollback @unknown default: fatalError("Unknown RTCSessionDescription type: \(rtcSessionDescription.type.rawValue)") } } - + + init(from decoder: any Decoder) throws { + let container = try decoder.container(keyedBy: CodingKeys.self) + self.sdp = try container.decode(String.self, forKey: .sdp) + self.pc_id = try container.decodeIfPresent(String.self, forKey: .pc_id) + self.type = try container.decode(SdpType.self, forKey: .type) + self.restart_pc = try container.decodeIfPresent(Bool.self, forKey: .restart_pc) ?? 
false + } + var rtcSessionDescription: RTCSessionDescription { return RTCSessionDescription(type: self.type.rtcSdpType, sdp: self.sdp) } diff --git a/Sources/PipecatClientIOSSmallWebrtc/types/SignallingMessage.swift b/Sources/PipecatClientIOSSmallWebrtc/types/SignallingMessage.swift index a39921f..05d0d83 100644 --- a/Sources/PipecatClientIOSSmallWebrtc/types/SignallingMessage.swift +++ b/Sources/PipecatClientIOSSmallWebrtc/types/SignallingMessage.swift @@ -1,9 +1,104 @@ import Foundation -// Constant for signalling type +// MARK: - Base Protocols + public let SIGNALLING_TYPE = "signalling" -// Enum for signalling messages -public enum SignallingMessage: String, Codable { - case renegotiate = "renegotiate" +/// Common protocol for all signalling messages +protocol SignallingMessageProtocol: Codable { + var type: String { get } +} + +/// Marker protocol for inbound messages (received from remote peer/server) +protocol InboundSignallingMessageProtocol: SignallingMessageProtocol {} + +/// Marker protocol for outbound messages (sent to remote peer/server) +protocol OutboundSignallingMessageProtocol: SignallingMessageProtocol {} + +// MARK: - Outbound Messages + +struct TrackStatusMessage: OutboundSignallingMessageProtocol { + let type = "trackStatus" + let receiverIndex: Int + let enabled: Bool + + enum CodingKeys: String, CodingKey { + case type + case receiverIndex = "receiver_index" + case enabled + } +} + +/// Wraps any signalling message (inbound or outbound) +struct OutboundSignallingMessage: Encodable { + let type = "signalling" + let message: SignallingMessageProtocol + + enum CodingKeys: String, CodingKey { + case type + case message + } + + func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: CodingKeys.self) + try container.encode(type, forKey: .type) + + // Encode based on concrete type + switch message { + case let trackStatus as TrackStatusMessage: + try container.encode(trackStatus, forKey: .message) + default: + let context = EncodingError.Context( + codingPath: encoder.codingPath, + debugDescription: "Unsupported signalling message type: \(Swift.type(of: message))" + ) + throw EncodingError.invalidValue(message, context) + } + } +} + +// MARK: - Inbound Messages + +struct RenegotiateMessage: InboundSignallingMessageProtocol { + let type = "renegotiate" +} + +struct PeerLeftMessage: InboundSignallingMessageProtocol { + let type = "peerLeft" +} + +enum InboundSignallingMessage: Decodable { + case renegotiate(RenegotiateMessage) + case peerLeft(PeerLeftMessage) + + enum CodingKeys: String, CodingKey { + case type + } + + enum MessageType: String { + case renegotiate + case peerLeft + } + + init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: CodingKeys.self) + let typeString = try container.decode(String.self, forKey: .type) + + guard let type = MessageType(rawValue: typeString) else { + throw DecodingError.dataCorruptedError( + forKey: .type, + in: container, + debugDescription: "Unknown inbound message type: \(typeString)" + ) + } + + switch type { + case .renegotiate: + let message = try RenegotiateMessage(from: decoder) + self = .renegotiate(message) + case .peerLeft: + let message = try PeerLeftMessage(from: decoder) + self = .peerLeft(message) + } + } } diff --git a/Sources/PipecatClientIOSSmallWebrtc/types/SmallWebRTCTransportConnectionParams.swift b/Sources/PipecatClientIOSSmallWebrtc/types/SmallWebRTCTransportConnectionParams.swift new file mode 100644 index 0000000..125a16e --- /dev/null +++ 
b/Sources/PipecatClientIOSSmallWebrtc/types/SmallWebRTCTransportConnectionParams.swift @@ -0,0 +1,21 @@ +import Foundation +import PipecatClientIOS + +public struct SmallWebRTCTransportConnectionParams: TransportConnectionParams { + + let webrtcRequestParams: APIRequest + + enum CodingKeys: CodingKey { + case webrtcRequestParams + } + + public init(from decoder: any Decoder) throws { + let container = try decoder.container(keyedBy: CodingKeys.self) + self.webrtcRequestParams = try container.decode(APIRequest.self, forKey: .webrtcRequestParams) + } + + public init(webrtcRequestParams: APIRequest) { + self.webrtcRequestParams = webrtcRequestParams + } + +} diff --git a/Sources/PipecatClientIOSSmallWebrtc/util/AudioManager.swift b/Sources/PipecatClientIOSSmallWebrtc/util/AudioManager.swift index 07f32da..4137fc5 100644 --- a/Sources/PipecatClientIOSSmallWebrtc/util/AudioManager.swift +++ b/Sources/PipecatClientIOSSmallWebrtc/util/AudioManager.swift @@ -6,80 +6,80 @@ protocol AudioManagerDelegate: AnyObject { } final class AudioManager { - internal weak var delegate: AudioManagerDelegate? = nil - + internal weak var delegate: AudioManagerDelegate? + /// user's explicitly preferred device. /// nil means "current system default". - internal var preferredAudioDevice: AudioDeviceType? = nil { + internal var preferredAudioDevice: AudioDeviceType? { didSet { if self.preferredAudioDevice != oldValue { self.configureAudioSessionIfNeeded() } } } - + /// the actual audio device in use. internal var audioDevice: AudioDeviceType? - + /// the user's preferred device, if it's available, or nil—signifying "current system default"—otherwise. /// this is the basis of the selectedMic() exposed to the user, matching the Daily transport's behavior. internal var preferredAudioDeviceIfAvailable: AudioDeviceType? { self.preferredAudioDeviceIsAvailable(preferredAudioDevice) ? self.preferredAudioDevice : nil } - + /// the set of available devices on the system. internal var availableDevices: [Device] = [] - + private var isManaging: Bool = false private let notificationCenter: NotificationCenter - + // The AVAudioSession class is only available as a singleton: // https://developer.apple.com/documentation/avfaudio/avaudiosession/1648777-init private let audioSession: AVAudioSession = .sharedInstance() - + private var availableDevicesPollTimer: Timer? - + private static var defaultDevice: AudioDeviceType { .speakerphone } - + internal convenience init() { self.init( notificationCenter: .default ) } - + internal init( notificationCenter: NotificationCenter ) { self.notificationCenter = notificationCenter self.addNotificationObservers() } - + // MARK: - API - + func startManagingIfNecessary() { guard !self.isManaging else { return } self.startManaging() } - + func startManaging() { if self.isManaging { // nothing to do here return } - + self.isManaging = true - + // Set initial device state (audioDevice and availableDevices) and configure the audio // session if needed. // Note: initial state after startManaging() does not represent a "change", so don't fire // callbacks self.refreshAvailableDevices(suppressDelegateCallbacks: true) self.configureAudioSessionIfNeeded(suppressDelegateCallbacks: true) - + // Start polling for changes to available devices self.availableDevicesPollTimer = Timer.scheduledTimer( withTimeInterval: 1, @@ -91,25 +91,25 @@ final class AudioManager { // fact, avoiding calling it avoids unnecessary repeated attempts at reconfiguration. 
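                // The timer-driven refresh complements the eager refresh performed by the
                // routeChangeNotification observer above; both funnel into
                // refreshAvailableDevices(), which only fires delegate callbacks on change.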
} } - + func stopManaging() { if !self.isManaging { // nothing to do here return } - + self.isManaging = false - + // Stop polling for changes to available devices self.availableDevicesPollTimer?.invalidate() - + // Reset device state self.availableDevices = [] self.audioDevice = nil } - + // MARK: - Notifications - + private func addNotificationObservers() { self.notificationCenter.addObserver( self, @@ -117,7 +117,7 @@ final class AudioManager { name: AVAudioSession.routeChangeNotification, object: self.audioSession ) - + self.notificationCenter.addObserver( self, selector: #selector(mediaServicesWereReset(_:)), @@ -125,24 +125,24 @@ final class AudioManager { object: self.audioSession ) } - + @objc private func routeDidChange(_ notification: Notification) { refreshAvailableDevices() configureAudioSessionIfNeeded() } - + @objc private func mediaServicesWereReset(_ notification: Notification) { self.configureAudioSessionIfNeeded() } - + // MARK: - Configuration - + private func configureAudioSessionIfNeeded(suppressDelegateCallbacks: Bool = false) { // Do nothing if we still not in a call if !self.isManaging { return } - + do { // If the current audio device is not the one we want... // @@ -153,7 +153,7 @@ final class AudioManager { if self.getCurrentAudioDevice() != self.preferredAudioDevice { // Apply desired configuration try self.applyConfiguration() - + // Check whether we've switched to a new audio device let newAudioDevice = getCurrentAudioDevice() if audioDevice != newAudioDevice { @@ -167,11 +167,11 @@ final class AudioManager { Logger.shared.error("Error configuring audio session") } } - + private func preferredAudioDeviceIsAvailable(_ preferredAudioDevice: AudioDeviceType?) -> Bool { var targetPortTypes: [AVAudioSession.Port] - var invert = false // whether to check whether targetPortTypes are *not* available - + var invert = false // whether to check whether targetPortTypes are *not* available + switch preferredAudioDevice { case .wired?, .earpiece?: targetPortTypes = [.headphones, .headsetMic] @@ -186,33 +186,35 @@ final class AudioManager { case nil: return false } - + var hasTargetPortType = false if let availableInputs = self.audioSession.availableInputs { hasTargetPortType = availableInputs.contains { targetPortTypes.contains($0.portType) } } - hasTargetPortType = hasTargetPortType || self.audioSession.currentRoute.outputs.contains { targetPortTypes.contains($0.portType) } + hasTargetPortType = + hasTargetPortType + || self.audioSession.currentRoute.outputs.contains { targetPortTypes.contains($0.portType) } return invert ? !hasTargetPortType : hasTargetPortType } - + // swiftlint:disable:next function_body_length cyclomatic_complexity internal func applyConfiguration() throws { let session = self.audioSession - + var sessionMode: AVAudioSession.Mode = .voiceChat let sessionCategory: AVAudioSession.Category = .playAndRecord - + // Mixing audio with other apps allows this app to stay alive in the background during // a call (assuming it has the voip background mode set). 
// After iOS 16, we must also always keep the bluetooth option here, otherwise // we are not able to see the bluetooth devices on the list var sessionCategoryOptions: AVAudioSession.CategoryOptions = [ .allowBluetooth, - .mixWithOthers, + .mixWithOthers ] - + let preferredDeviceToUse = preferredAudioDeviceIfAvailable - + switch preferredDeviceToUse { case .speakerphone?: sessionCategoryOptions.insert(.defaultToSpeaker) @@ -222,7 +224,7 @@ final class AudioManager { case nil: sessionMode = AVAudioSession.Mode.videoChat } - + do { try session.setCategory( sessionCategory, @@ -232,7 +234,7 @@ final class AudioManager { } catch { Logger.shared.error("Error configuring audio session") } - + let preferredInput: AVAudioSessionPortDescription? let overriddenOutputAudioPort: AVAudioSession.PortOverride switch preferredDeviceToUse { @@ -261,7 +263,7 @@ final class AudioManager { preferredInput = nil overriddenOutputAudioPort = .none } - + do { try session.overrideOutputAudioPort(overriddenOutputAudioPort) } catch let error { @@ -275,14 +277,14 @@ final class AudioManager { } } } - + // MARK: - Available Devices - + private func refreshAvailableDevices(suppressDelegateCallbacks: Bool = false) { if !isManaging { return } - + // Check for change in available devices let newAvailableDevices = getAvailableDevices() if availableDevices != newAvailableDevices { @@ -292,27 +294,27 @@ final class AudioManager { } } } - + private func getCurrentAudioDevice() -> AudioDeviceType { let defaultDevice: AudioDeviceType = Self.defaultDevice - + guard let firstOutput = self.audioSession.currentRoute.outputs.first else { return defaultDevice } - + guard let audioDevice = AudioDeviceType(sessionPort: firstOutput.portType) else { return defaultDevice } - + return audioDevice } - + // Adapted from WebrtcDevicesManager in Daily private func getAvailableDevices() -> [Device] { let audioSession = self.audioSession let availableInputs = audioSession.availableInputs ?? 
[] let availableOutputs = audioSession.currentRoute.outputs - + var deviceTypes = availableInputs.compactMap { input in AudioDeviceType(sessionPort: input.portType) } @@ -323,7 +325,7 @@ final class AudioManager { } else { deviceTypes.append(AudioDeviceType.speakerphone) } - + // When we are using bluetooth as the default route, // iOS does not list the bluetooth device on the list of availableInputs let outputDevice = availableOutputs.first.flatMap { AudioDeviceType(sessionPort: $0.portType) } @@ -332,7 +334,7 @@ final class AudioManager { deviceTypes.append(outputDevice) } } - + // bluetooth and earpiece should only be available in case we don't have a wired headset plugged // otherwise we can never change the route to bluetooth or earpiece, iOS does not respect that if deviceTypes.contains(AudioDeviceType.wired) { @@ -340,41 +342,42 @@ final class AudioManager { device != AudioDeviceType.bluetooth && device != AudioDeviceType.earpiece } } - + // NOTE: we use .input for the kind of all of these, since we only care about reporting mics - return deviceTypes.map { deviceType in - switch deviceType { - case .bluetooth: - return .init( - deviceID: deviceType.deviceID, - groupID: "", - kind: .audio(.input), - label: "Bluetooth Speaker & Mic" - ) - case .speakerphone: - return .init( - deviceID: deviceType.deviceID, - groupID: "", - kind: .audio(.input), - label: "Built-in Speaker & Mic" - ) - case .wired: - return .init( - deviceID: deviceType.deviceID, - groupID: "", - kind: .audio(.input), - label: "Wired Speaker & Mic" - ) - case .earpiece: - return .init( - deviceID: deviceType.deviceID, - groupID: "", - kind: .audio(.input), - label: "Built-in Earpiece & Mic" - ) + return + deviceTypes.map { deviceType in + switch deviceType { + case .bluetooth: + return .init( + deviceID: deviceType.deviceID, + groupID: "", + kind: .audio(.input), + label: "Bluetooth Speaker & Mic" + ) + case .speakerphone: + return .init( + deviceID: deviceType.deviceID, + groupID: "", + kind: .audio(.input), + label: "Built-in Speaker & Mic" + ) + case .wired: + return .init( + deviceID: deviceType.deviceID, + groupID: "", + kind: .audio(.input), + label: "Wired Speaker & Mic" + ) + case .earpiece: + return .init( + deviceID: deviceType.deviceID, + groupID: "", + kind: .audio(.input), + label: "Built-in Earpiece & Mic" + ) + } } - } - // A stable order helps us detect when available devices have changed - .sorted(by: { $0.deviceID < $1.deviceID }) + // A stable order helps us detect when available devices have changed + .sorted(by: { $0.deviceID < $1.deviceID }) } } diff --git a/Sources/PipecatClientIOSSmallWebrtc/util/Logger.swift b/Sources/PipecatClientIOSSmallWebrtc/util/Logger.swift index b77e63c..f404cc6 100644 --- a/Sources/PipecatClientIOSSmallWebrtc/util/Logger.swift +++ b/Sources/PipecatClientIOSSmallWebrtc/util/Logger.swift @@ -43,52 +43,52 @@ public func setLogLevel(_ logLevel: LogLevel) { internal final class Logger { fileprivate var level: LogLevel = .warn - + fileprivate let osLog: OSLog = .init(subsystem: "co.daily.pipecat.SmallWebRTC", category: "main") - + internal static let shared: Logger = .init() - + @inlinable internal func error(_ message: @autoclosure () -> String) { self.log(.error, message()) } - + @inlinable internal func warn(_ message: @autoclosure () -> String) { self.log(.warn, message()) } - + @inlinable internal func info(_ message: @autoclosure () -> String) { self.log(.info, message()) } - + @inlinable internal func debug(_ message: @autoclosure () -> String) { self.log(.debug, 
message()) } - + @inlinable internal func trace(_ message: @autoclosure () -> String) { self.log(.trace, message()) } - + @inlinable internal func log(_ level: LogLevel, _ message: @autoclosure () -> String) { guard self.level.rawValue >= level.rawValue else { return } - + guard self.level != .off else { return } - + let log = self.osLog - + // The following force-unwrap is okay since we check for `.off` above: // swiftlint:disable:next force_unwrapping let type = level.logType! - + os_log("%@", log: log, type: type, message()) } } diff --git a/Sources/PipecatClientIOSSmallWebrtc/util/SmallWebRTCTransportTrackExtension.swift b/Sources/PipecatClientIOSSmallWebrtc/util/SmallWebRTCTransportTrackExtension.swift new file mode 100644 index 0000000..3c03bfc --- /dev/null +++ b/Sources/PipecatClientIOSSmallWebrtc/util/SmallWebRTCTransportTrackExtension.swift @@ -0,0 +1,130 @@ +import Foundation +import PipecatClientIOS + +// MARK: - Track Change Detection +extension SmallWebRTCTransport { + + func handleTrackChanges(previous: Tracks, current: Tracks) { + // Local participant changes + compareParticipantTracks( + previous: previous.local, + current: current.local, + participant: nil // Local participant + ) + + // Bot participant changes + compareParticipantTracks( + previous: previous.bot, + current: current.bot, + participant: self.connectedBotParticipant + ) + } + + func handleInitialTracks(tracks: Tracks) { + // Notify for local tracks + notifyParticipantTracksStarted(tracks: tracks.local, participant: nil) + + // Notify for bot tracks + if let botTracks = tracks.bot { + notifyParticipantTracksStarted(tracks: botTracks, participant: self.connectedBotParticipant) + } + } + + private func compareParticipantTracks( + previous: ParticipantTracks?, + current: ParticipantTracks?, + participant: Participant? + ) { + let prev = previous ?? ParticipantTracks(audio: nil, video: nil, screenAudio: nil, screenVideo: nil) + let curr = current ?? 
ParticipantTracks(audio: nil, video: nil, screenAudio: nil, screenVideo: nil) + + // Check audio track changes + compareTrack( + previous: prev.audio, + current: curr.audio, + participant: participant, + isScreen: false + ) + + // Check video track changes + compareTrack( + previous: prev.video, + current: curr.video, + participant: participant, + isScreen: false + ) + + // Check screen audio track changes + compareTrack( + previous: prev.screenAudio, + current: curr.screenAudio, + participant: participant, + isScreen: true + ) + + // Check screen video track changes + compareTrack( + previous: prev.screenVideo, + current: curr.screenVideo, + participant: participant, + isScreen: true + ) + } + + private func compareTrack( + previous: MediaStreamTrack?, + current: MediaStreamTrack?, + participant: Participant?, + isScreen: Bool + ) { + // Track stopped (was present, now absent) + if let prevTrack = previous, current == nil { + if isScreen { + delegate?.onScreenTrackStopped(track: prevTrack, participant: participant) + } else { + delegate?.onTrackStopped(track: prevTrack, participant: participant) + } + } + + // Track started (was absent, now present) + if previous == nil, let currTrack = current { + if isScreen { + delegate?.onScreenTrackStarted(track: currTrack, participant: participant) + } else { + delegate?.onTrackStarted(track: currTrack, participant: participant) + } + } + + // Track changed (different track IDs) + if let prevTrack = previous, + let currTrack = current, + prevTrack.id != currTrack.id { + // Stop the old track and start the new one + if isScreen { + delegate?.onScreenTrackStopped(track: prevTrack, participant: participant) + delegate?.onScreenTrackStarted(track: currTrack, participant: participant) + } else { + delegate?.onTrackStopped(track: prevTrack, participant: participant) + delegate?.onTrackStarted(track: currTrack, participant: participant) + } + } + } + + private func notifyParticipantTracksStarted(tracks: ParticipantTracks, participant: Participant?) { + if let audioTrack = tracks.audio { + delegate?.onTrackStarted(track: audioTrack, participant: participant) + } + + if let videoTrack = tracks.video { + delegate?.onTrackStarted(track: videoTrack, participant: participant) + } + + if let screenAudioTrack = tracks.screenAudio { + delegate?.onScreenTrackStarted(track: screenAudioTrack, participant: participant) + } + + if let screenVideoTrack = tracks.screenVideo { + delegate?.onScreenTrackStarted(track: screenVideoTrack, participant: participant) + } + } +} diff --git a/Sources/PipecatClientIOSSmallWebrtc/util/VideoManager.swift b/Sources/PipecatClientIOSSmallWebrtc/util/VideoManager.swift index 00a7631..de37d31 100644 --- a/Sources/PipecatClientIOSSmallWebrtc/util/VideoManager.swift +++ b/Sources/PipecatClientIOSSmallWebrtc/util/VideoManager.swift @@ -1,5 +1,5 @@ import Foundation -@_implementationOnly import WebRTC +import WebRTC internal class VideoManager { diff --git a/Sources/PipecatClientIOSSmallWebrtc/view/SmallWebRTCVideoView.swift b/Sources/PipecatClientIOSSmallWebrtc/view/SmallWebRTCVideoView.swift index 8cd363e..17ebaae 100644 --- a/Sources/PipecatClientIOSSmallWebrtc/view/SmallWebRTCVideoView.swift +++ b/Sources/PipecatClientIOSSmallWebrtc/view/SmallWebRTCVideoView.swift @@ -2,16 +2,16 @@ import PipecatClientIOS /// Overrides the WebRTC [VideoView] to allow [MediaTrackId] tracks from the VoiceClient to be rendered. public final class SmallWebRTCVideoView: VideoView { - + /// Displays the specified [MediaTrackId] in this view. 
public var videoTrack: MediaTrackId? {
         get {
             guard let track = self.track else { return nil }
-            return track.toRtvi()
+            return track.toRtvi().id
         }
         set {
             self.track = newValue.flatMap { VideoTrackRegistry.getTrack(mediaTrackId: $0) }
         }
     }
-    
+
 }
diff --git a/Sources/PipecatClientIOSSmallWebrtc/view/SmallWebRTCVideoViewSwiftUI.swift b/Sources/PipecatClientIOSSmallWebrtc/view/SmallWebRTCVideoViewSwiftUI.swift
index b30e8eb..680cad5 100644
--- a/Sources/PipecatClientIOSSmallWebrtc/view/SmallWebRTCVideoViewSwiftUI.swift
+++ b/Sources/PipecatClientIOSSmallWebrtc/view/SmallWebRTCVideoViewSwiftUI.swift
@@ -3,7 +3,7 @@ import PipecatClientIOS
 
 /// A wrapper for `SmallWebRTCVideoView` that exposes the video size via a `@Binding`.
 public struct SmallWebRTCVideoViewSwiftUI: UIViewRepresentable {
-    
+
     /// The current size of the video being rendered by this view.
     @Binding private(set) var videoSize: CGSize
 
diff --git a/Sources/PipecatClientIOSSmallWebrtc/view/VideoView.swift b/Sources/PipecatClientIOSSmallWebrtc/view/VideoView.swift
index 4bc7b08..51719b8 100644
--- a/Sources/PipecatClientIOSSmallWebrtc/view/VideoView.swift
+++ b/Sources/PipecatClientIOSSmallWebrtc/view/VideoView.swift
@@ -77,12 +77,12 @@ open class VideoView: UIView {
     }
 
     /// The view's delegate.
-    public weak var delegate: VideoViewDelegate? = nil
+    public weak var delegate: VideoViewDelegate?
 
     internal let rtcView: RTCMTLVideoView = .init(frame: .zero)
 
     private var contentModeConstraints: [NSLayoutConstraint] = []
 
-    private var rtcDelegate: Delegate? = nil
+    private var rtcDelegate: Delegate?
 
     public override init(frame: CGRect) {
         super.init(frame: frame)
diff --git a/scripts/formatCode.sh b/scripts/formatCode.sh
new file mode 100755
index 0000000..2c1247b
--- /dev/null
+++ b/scripts/formatCode.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+# Formatting the code
+swift-format format Sources -i -r
+# Checking for code practices and style conventions
+swiftlint --fix Sources
+
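
Note: the signalling types introduced above round-trip as in the sketch below. This is a minimal illustration, not part of the patch; it assumes the code runs inside the PipecatClientIOSSmallWebrtc module (the message structs are internal), and the JSON shown in the comment is indicative only, since key order can vary.

import Foundation

do {
    // Outbound: wrap a trackStatus payload in the "signalling" envelope.
    let envelope = OutboundSignallingMessage(
        message: TrackStatusMessage(receiverIndex: 0, enabled: true)
    )
    let encoded = try JSONEncoder().encode(envelope)
    // Roughly: {"type":"signalling","message":{"type":"trackStatus","receiver_index":0,"enabled":true}}
    print(String(data: encoded, encoding: .utf8) ?? "")

    // Inbound: unknown "type" values throw a DecodingError, so new server
    // message kinds fail loudly instead of being silently dropped.
    let inbound = Data(#"{"type": "renegotiate"}"#.utf8)
    switch try JSONDecoder().decode(InboundSignallingMessage.self, from: inbound) {
    case .renegotiate:
        print("remote peer requested renegotiation")
    case .peerLeft:
        print("remote peer left")
    }
} catch {
    print("signalling round-trip failed: \(error)")
}

Keeping inbound and outbound messages in separate protocol hierarchies lets the decoder stay exhaustive over the message kinds the transport actually expects, while the envelope's manual encode(to:) is the single place that has to know each concrete outbound type.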