Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 10 additions & 2 deletions samples/CameraAccess/CameraAccess.xcodeproj/project.pbxproj
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@
8FD96B7F2E6F0A9800F56AB1 /* CameraAccessApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8FD96B792E6F0A9800F56AB1 /* CameraAccessApp.swift */; };
8FD96B812E6F0A9800F56AB1 /* HomeScreenView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8FD96B722E6F0A9800F56AB1 /* HomeScreenView.swift */; };
8FD96B872E6F0A9800F56AB1 /* StreamSessionViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8FD96B6F2E6F0A9800F56AB1 /* StreamSessionViewModel.swift */; };
9DD6CC002F4A000000ED7098 /* VideoDecoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9DD6CBFF2F4A000000ED7098 /* VideoDecoder.swift */; };
8FD96B882E6F0A9800F56AB1 /* StreamSessionView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8FD96B752E6F0A9800F56AB1 /* StreamSessionView.swift */; };
8FD96B8A2E6F0A9800F56AB1 /* PhotoPreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8FD96B742E6F0A9800F56AB1 /* PhotoPreviewView.swift */; };
8FD96B8D2E6F0A9800F56AB1 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8FD96B772E6F0A9800F56AB1 /* Assets.xcassets */; };
Expand All @@ -36,10 +35,13 @@
9DD6CB092F3C637D00ED7098 /* WebRTCClient.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9DD6CB012F3C637D00ED7098 /* WebRTCClient.swift */; };
9DD6CB0C2F3C648800ED7098 /* WebRTC in Frameworks */ = {isa = PBXBuildFile; productRef = 9DD6CB0B2F3C648800ED7098 /* WebRTC */; };
9DD6CB0E2F3C64F400ED7098 /* WebRTCOverlayView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9DD6CB0D2F3C64F400ED7098 /* WebRTCOverlayView.swift */; };
9DD6CC002F4A000000ED7098 /* VideoDecoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9DD6CBFF2F4A000000ED7098 /* VideoDecoder.swift */; };
9DD894B22F4047630090B9B9 /* SettingsManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9DD894AF2F4047630090B9B9 /* SettingsManager.swift */; };
9DD894B32F4047630090B9B9 /* SettingsView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9DD894B02F4047630090B9B9 /* SettingsView.swift */; };
9DD895962F405E0E0090B9B9 /* RTCVideoView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9DD895952F405E0E0090B9B9 /* RTCVideoView.swift */; };
9DD895972F405E0E0090B9B9 /* PiPVideoView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9DD895942F405E0E0090B9B9 /* PiPVideoView.swift */; };
9DE0A3012F50000100AA0001 /* ShortcutLaunchCoordinator.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9DE0A3002F50000100AA0001 /* ShortcutLaunchCoordinator.swift */; };
9DE0A3032F50000100AA0001 /* StartIPhoneStreamingIntent.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9DE0A3022F50000100AA0001 /* StartIPhoneStreamingIntent.swift */; };
A1B2C3D42F0A000200000001 /* GeminiConfig.swift in Sources */ = {isa = PBXBuildFile; fileRef = A1B2C3D42F0A000100000001 /* GeminiConfig.swift */; };
A1B2C3D42F0A000200000002 /* GeminiLiveService.swift in Sources */ = {isa = PBXBuildFile; fileRef = A1B2C3D42F0A000100000002 /* GeminiLiveService.swift */; };
A1B2C3D42F0A000200000003 /* AudioManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = A1B2C3D42F0A000100000003 /* AudioManager.swift */; };
Expand Down Expand Up @@ -81,7 +83,6 @@
8F2D237F2E856711002D0588 /* DebugMenuViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DebugMenuViewModel.swift; sourceTree = "<group>"; };
8F8F00772E8ACB4500A4BDAF /* WearablesViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WearablesViewModel.swift; sourceTree = "<group>"; };
8FD96B6F2E6F0A9800F56AB1 /* StreamSessionViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamSessionViewModel.swift; sourceTree = "<group>"; };
9DD6CBFF2F4A000000ED7098 /* VideoDecoder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoDecoder.swift; sourceTree = "<group>"; };
8FD96B722E6F0A9800F56AB1 /* HomeScreenView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = HomeScreenView.swift; sourceTree = "<group>"; };
8FD96B742E6F0A9800F56AB1 /* PhotoPreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PhotoPreviewView.swift; sourceTree = "<group>"; };
8FD96B752E6F0A9800F56AB1 /* StreamSessionView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamSessionView.swift; sourceTree = "<group>"; };
Expand All @@ -106,10 +107,13 @@
9DD6CB022F3C637D00ED7098 /* WebRTCConfig.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WebRTCConfig.swift; sourceTree = "<group>"; };
9DD6CB032F3C637D00ED7098 /* WebRTCSessionViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WebRTCSessionViewModel.swift; sourceTree = "<group>"; };
9DD6CB0D2F3C64F400ED7098 /* WebRTCOverlayView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WebRTCOverlayView.swift; sourceTree = "<group>"; };
9DD6CBFF2F4A000000ED7098 /* VideoDecoder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoDecoder.swift; sourceTree = "<group>"; };
9DD894AF2F4047630090B9B9 /* SettingsManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SettingsManager.swift; sourceTree = "<group>"; };
9DD894B02F4047630090B9B9 /* SettingsView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SettingsView.swift; sourceTree = "<group>"; };
9DD895942F405E0E0090B9B9 /* PiPVideoView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PiPVideoView.swift; sourceTree = "<group>"; };
9DD895952F405E0E0090B9B9 /* RTCVideoView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCVideoView.swift; sourceTree = "<group>"; };
9DE0A3002F50000100AA0001 /* ShortcutLaunchCoordinator.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ShortcutLaunchCoordinator.swift; sourceTree = "<group>"; };
9DE0A3022F50000100AA0001 /* StartIPhoneStreamingIntent.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StartIPhoneStreamingIntent.swift; sourceTree = "<group>"; };
A1B2C3D42F0A000100000001 /* GeminiConfig.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = GeminiConfig.swift; sourceTree = "<group>"; };
A1B2C3D42F0A000100000002 /* GeminiLiveService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = GeminiLiveService.swift; sourceTree = "<group>"; };
A1B2C3D42F0A000100000003 /* AudioManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioManager.swift; sourceTree = "<group>"; };
Expand Down Expand Up @@ -207,6 +211,8 @@
9DD6CB042F3C637D00ED7098 /* WebRTC */,
9DD6CAAD2F3C426600ED7098 /* Secrets.swift */,
9DD6CAAE2F3C426600ED7098 /* Secrets.swift.example */,
9DE0A3002F50000100AA0001 /* ShortcutLaunchCoordinator.swift */,
9DE0A3022F50000100AA0001 /* StartIPhoneStreamingIntent.swift */,
9D3C69602F367CF700E641A5 /* iPhone */,
A1B2C3D42F0A000300000001 /* Gemini */,
8FD96B702E6F0A9800F56AB1 /* ViewModels */,
Expand Down Expand Up @@ -426,6 +432,8 @@
9DD895962F405E0E0090B9B9 /* RTCVideoView.swift in Sources */,
9DD895972F405E0E0090B9B9 /* PiPVideoView.swift in Sources */,
8F8F00782E8ACB4600A4BDAF /* WearablesViewModel.swift in Sources */,
9DE0A3012F50000100AA0001 /* ShortcutLaunchCoordinator.swift in Sources */,
9DE0A3032F50000100AA0001 /* StartIPhoneStreamingIntent.swift in Sources */,
E6A188482EB918740097D0E1 /* StreamView.swift in Sources */,
8FFD5FF52E8422580035E446 /* CircleButton.swift in Sources */,
8FFD5FF62E8422580035E446 /* CustomButton.swift in Sources */,
Expand Down
7 changes: 6 additions & 1 deletion samples/CameraAccess/CameraAccess/CameraAccessApp.swift
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ struct CameraAccessApp: App {
// Debug menu for simulating device connections during development
@StateObject private var debugMenuViewModel = DebugMenuViewModel(mockDeviceKit: MockDeviceKit.shared)
#endif
@StateObject private var shortcutLaunchCoordinator = ShortcutLaunchCoordinator.shared
private let wearables: WearablesInterface
@StateObject private var wearablesViewModel: WearablesViewModel

Expand All @@ -49,7 +50,11 @@ struct CameraAccessApp: App {
WindowGroup {
// Main app view with access to the shared Wearables SDK instance
// The Wearables.shared singleton provides the core DAT API
MainAppView(wearables: Wearables.shared, viewModel: wearablesViewModel)
MainAppView(
wearables: Wearables.shared,
viewModel: wearablesViewModel,
shortcutLaunchCoordinator: shortcutLaunchCoordinator
)
// Show error alerts for view model failures
.alert("Error", isPresented: $wearablesViewModel.showError) {
Button("OK") {
Expand Down
30 changes: 30 additions & 0 deletions samples/CameraAccess/CameraAccess/ShortcutLaunchCoordinator.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
import Combine
import Foundation

/// Actions an App Intent can ask the running app to perform at launch.
enum ShortcutLaunchAction: Equatable {
  /// Begin iPhone streaming; optionally also start an AI session once live.
  case startIPhoneStreaming(startAISession: Bool)
}

/// One uniquely identified launch request handed from an intent to the UI.
/// The fresh `id` per request lets SwiftUI `.task(id:)` observers re-fire
/// even when the same action is requested twice in a row.
struct ShortcutLaunchRequest: Equatable, Identifiable {
  // Unique per request; drives `.task(id:)` re-evaluation in views.
  let id: UUID
  // What the requester wants the app to do.
  let action: ShortcutLaunchAction
}

/// Main-actor bridge between App Intents and the SwiftUI scene: an intent
/// enqueues a launch request here, and the UI observes and consumes it.
@MainActor
final class ShortcutLaunchCoordinator: ObservableObject {
  /// Process-wide instance shared by the intent and the view hierarchy.
  static let shared = ShortcutLaunchCoordinator()

  /// The most recent unhandled launch request, or nil when there is none.
  /// Externally read-only; mutate via `requestStartIPhoneStreaming` / `consumePendingRequest`.
  @Published private(set) var pendingRequest: ShortcutLaunchRequest?

  /// Queues a request to start iPhone streaming, replacing any prior request.
  /// A fresh `UUID` guarantees observers keyed on `pendingRequest?.id` re-fire.
  func requestStartIPhoneStreaming(startAISession: Bool) {
    let action = ShortcutLaunchAction.startIPhoneStreaming(startAISession: startAISession)
    pendingRequest = ShortcutLaunchRequest(id: UUID(), action: action)
  }

  /// Returns the pending request (if any) and clears it, so a request is
  /// handled at most once even if multiple observers race.
  func consumePendingRequest() -> ShortcutLaunchRequest? {
    defer { pendingRequest = nil }
    return pendingRequest
  }
}
43 changes: 43 additions & 0 deletions samples/CameraAccess/CameraAccess/StartIPhoneStreamingIntent.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
import AppIntents

struct StartIPhoneStreamingIntent: AppIntent {
static var title: LocalizedStringResource = "Start on iPhone"
static var description = IntentDescription("Open VisionClaw and start iPhone streaming.")
static var openAppWhenRun = true

@Parameter(title: "Start AI Session", default: false)
var startAISession: Bool

init() {}

init(startAISession: Bool) {
self.startAISession = startAISession
}

@MainActor
func enqueueRequest(using coordinator: ShortcutLaunchCoordinator) {
coordinator.requestStartIPhoneStreaming(startAISession: startAISession)
}

func perform() async throws -> some IntentResult {
await MainActor.run {
enqueueRequest(using: .shared)
}
return .result()
}
}

/// Registers the app's shortcuts so they appear in the Shortcuts app and are
/// invocable by voice without manual setup.
struct VisionClawAppShortcuts: AppShortcutsProvider {
  static var appShortcuts: [AppShortcut] {
    // NOTE: `\(.applicationName)` is the AppIntents phrase token that expands
    // to the app's display name — it is not ordinary string interpolation.
    AppShortcut(
      intent: StartIPhoneStreamingIntent(),
      phrases: [
        "Start on iPhone in \(.applicationName)",
        "Start iPhone streaming in \(.applicationName)",
        "Open iPhone camera in \(.applicationName)"
      ],
      shortTitle: "Start on iPhone",
      systemImageName: "iphone"
    )
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,20 @@ enum StreamingMode {
case iPhone
}

/// Policy for whether the local iPhone camera should begin capturing frames.
enum CameraCapturePolicy {
  /// Glasses mode always captures; iPhone mode captures only while the
  /// Video Streaming setting is enabled (voice-only otherwise).
  static func shouldStartLocalCameraCapture(
    streamingMode: StreamingMode,
    videoStreamingEnabled: Bool
  ) -> Bool {
    // Exhaustive switch so adding a new StreamingMode case is a compile error here.
    let shouldCapture: Bool
    switch streamingMode {
    case .glasses:
      shouldCapture = true
    case .iPhone:
      shouldCapture = videoStreamingEnabled
    }
    return shouldCapture
  }
}

@MainActor
class StreamSessionViewModel: ObservableObject {
@Published var currentVideoFrame: UIImage?
Expand All @@ -48,6 +62,10 @@ class StreamSessionViewModel: ObservableObject {
streamingStatus != .stopped
}

var isVoiceOnlyIPhoneMode: Bool {
streamingMode == .iPhone && !SettingsManager.shared.videoStreamingEnabled
}

var resolutionLabel: String {
switch selectedResolution {
case .low: return "360x640"
Expand Down Expand Up @@ -286,6 +304,18 @@ class StreamSessionViewModel: ObservableObject {

private func startIPhoneSession() {
streamingMode = .iPhone
guard CameraCapturePolicy.shouldStartLocalCameraCapture(
streamingMode: streamingMode,
videoStreamingEnabled: SettingsManager.shared.videoStreamingEnabled
) else {
currentVideoFrame = nil
hasReceivedFirstFrame = false
iPhoneCameraManager = nil
streamingStatus = .streaming
NSLog("[Stream] iPhone voice-only mode started; local camera capture disabled")
return
}

let camera = IPhoneCameraManager()
camera.onFrameCaptured = { [weak self] image in
Task { @MainActor [weak self] in
Expand Down
30 changes: 24 additions & 6 deletions samples/CameraAccess/CameraAccess/Views/MainAppView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -20,18 +20,36 @@ import SwiftUI
struct MainAppView: View {
let wearables: WearablesInterface
@ObservedObject private var viewModel: WearablesViewModel
@ObservedObject private var shortcutLaunchCoordinator: ShortcutLaunchCoordinator

init(wearables: WearablesInterface, viewModel: WearablesViewModel) {
init(
wearables: WearablesInterface,
viewModel: WearablesViewModel,
shortcutLaunchCoordinator: ShortcutLaunchCoordinator
) {
self.wearables = wearables
self.viewModel = viewModel
self.shortcutLaunchCoordinator = shortcutLaunchCoordinator
}

var body: some View {
if viewModel.registrationState == .registered || viewModel.hasMockDevice || viewModel.skipToIPhoneMode {
StreamSessionView(wearables: wearables, wearablesVM: viewModel)
} else {
// User not registered - show registration/onboarding flow
HomeScreenView(viewModel: viewModel)
Group {
if viewModel.registrationState == .registered || viewModel.hasMockDevice || viewModel.skipToIPhoneMode {
StreamSessionView(
wearables: wearables,
wearablesVM: viewModel,
shortcutLaunchCoordinator: shortcutLaunchCoordinator
)
} else {
// User not registered - show registration/onboarding flow
HomeScreenView(viewModel: viewModel)
}
}
.task(id: shortcutLaunchCoordinator.pendingRequest?.id) {
guard let request = shortcutLaunchCoordinator.pendingRequest else { return }
if case .startIPhoneStreaming = request.action {
viewModel.skipToIPhoneMode = true
}
}
}
}
38 changes: 36 additions & 2 deletions samples/CameraAccess/CameraAccess/Views/StreamSessionView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -18,13 +18,19 @@ import UIKit
struct StreamSessionView: View {
let wearables: WearablesInterface
@ObservedObject private var wearablesViewModel: WearablesViewModel
@ObservedObject private var shortcutLaunchCoordinator: ShortcutLaunchCoordinator
@StateObject private var viewModel: StreamSessionViewModel
@StateObject private var geminiVM = GeminiSessionViewModel()
@StateObject private var webrtcVM = WebRTCSessionViewModel()

init(wearables: WearablesInterface, wearablesVM: WearablesViewModel) {
init(
wearables: WearablesInterface,
wearablesVM: WearablesViewModel,
shortcutLaunchCoordinator: ShortcutLaunchCoordinator
) {
self.wearables = wearables
self.wearablesViewModel = wearablesVM
self.shortcutLaunchCoordinator = shortcutLaunchCoordinator
self._viewModel = StateObject(wrappedValue: StreamSessionViewModel(wearables: wearables))
}

Expand All @@ -42,10 +48,14 @@ struct StreamSessionView: View {
viewModel.geminiSessionVM = geminiVM
viewModel.webrtcSessionVM = webrtcVM
geminiVM.streamingMode = viewModel.streamingMode
await handlePendingShortcutRequestIfNeeded()
}
.onChange(of: viewModel.streamingMode) { newMode in
.onChange(of: viewModel.streamingMode) { _, newMode in
geminiVM.streamingMode = newMode
}
.task(id: shortcutLaunchCoordinator.pendingRequest?.id) {
await handlePendingShortcutRequestIfNeeded()
}
.onAppear {
UIApplication.shared.isIdleTimerDisabled = true
}
Expand All @@ -60,4 +70,28 @@ struct StreamSessionView: View {
Text(viewModel.errorMessage)
}
}

  /// Consumes any queued shortcut launch request and drives session state to
  /// satisfy it: switch to iPhone streaming and, if asked, start an AI session.
  /// Invoked from `.task` on appear and whenever `pendingRequest?.id` changes.
  private func handlePendingShortcutRequestIfNeeded() async {
    // Consume (read + clear) so a re-fired `.task(id:)` cannot replay the same request.
    guard let request = shortcutLaunchCoordinator.consumePendingRequest() else { return }

    // Keep the app in iPhone mode even when no glasses are registered.
    wearablesViewModel.skipToIPhoneMode = true

    switch request.action {
    case .startIPhoneStreaming(let startAISession):
      // Tear down a glasses-mode AI session before switching modes.
      if viewModel.streamingMode != .iPhone && geminiVM.isGeminiActive {
        geminiVM.stopSession()
      }

      // Stop an active glasses stream; the iPhone stream replaces it.
      if viewModel.streamingMode != .iPhone && viewModel.isStreaming {
        await viewModel.stopSession()
      }

      // Start iPhone streaming unless it is already the active stream.
      if !viewModel.isStreaming || viewModel.streamingMode != .iPhone {
        await viewModel.handleStartIPhone()
      }

      // Start AI only when requested AND the iPhone stream actually came up.
      guard startAISession, viewModel.streamingMode == .iPhone, viewModel.isStreaming else { return }
      await geminiVM.startSession()
    }
  }
}
30 changes: 30 additions & 0 deletions samples/CameraAccess/CameraAccess/Views/StreamView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,8 @@ struct StreamView: View {
.clipped()
}
.edgesIgnoringSafeArea(.all)
} else if viewModel.isVoiceOnlyIPhoneMode {
VoiceOnlyIPhoneView()
} else {
ProgressView()
.scaleEffect(1.5)
Expand Down Expand Up @@ -146,6 +148,34 @@ struct StreamView: View {
}
}

/// Full-screen placeholder shown in iPhone mode when Video Streaming is
/// disabled: audio continues, but no camera frames are being captured.
private struct VoiceOnlyIPhoneView: View {
  /// Dark vertical gradient backdrop matching the streaming UI's black theme.
  private var backdrop: some View {
    LinearGradient(
      colors: [Color.black, Color(red: 0.04, green: 0.08, blue: 0.10)],
      startPoint: .top,
      endPoint: .bottom
    )
  }

  var body: some View {
    VStack(spacing: 14) {
      Image(systemName: "mic.circle.fill")
        .font(.system(size: 64, weight: .light))
        .foregroundColor(.white.opacity(0.9))

      Text("Voice-only mode")
        .font(.system(size: 24, weight: .semibold))
        .foregroundColor(.white)

      Text("Video Streaming is disabled. The iPhone camera is not capturing frames.")
        .font(.system(size: 15))
        .foregroundColor(.white.opacity(0.72))
        .multilineTextAlignment(.center)
        .padding(.horizontal, 32)
    }
    .frame(maxWidth: .infinity, maxHeight: .infinity)
    .background(backdrop)
  }
}

// Extracted controls for clarity
struct ControlsView: View {
@ObservedObject var viewModel: StreamSessionViewModel
Expand Down
Loading