diff --git a/src/components/audio-stream/audio-stream-bottom-sheet.tsx b/src/components/audio-stream/audio-stream-bottom-sheet.tsx index 2880e7e..640ad11 100644 --- a/src/components/audio-stream/audio-stream-bottom-sheet.tsx +++ b/src/components/audio-stream/audio-stream-bottom-sheet.tsx @@ -245,4 +245,3 @@ export const AudioStreamBottomSheet = () => { ); }; - diff --git a/src/components/livekit/livekit-bottom-sheet.tsx b/src/components/livekit/livekit-bottom-sheet.tsx index 528d9ed..a2c0b63 100644 --- a/src/components/livekit/livekit-bottom-sheet.tsx +++ b/src/components/livekit/livekit-bottom-sheet.tsx @@ -32,6 +32,7 @@ export const LiveKitBottomSheet = () => { const { trackEvent } = useAnalytics(); const [currentView, setCurrentView] = useState(BottomSheetView.ROOM_SELECT); + const [previousView, setPreviousView] = useState(BottomSheetView.ROOM_SELECT); const [isMuted, setIsMuted] = useState(true); // Default to muted const [permissionsRequested, setPermissionsRequested] = useState(false); @@ -181,12 +182,15 @@ export const LiveKitBottomSheet = () => { }, [disconnectFromRoom]); const handleShowAudioSettings = useCallback(() => { + if (currentView !== BottomSheetView.AUDIO_SETTINGS) { + setPreviousView(currentView); + } setCurrentView(BottomSheetView.AUDIO_SETTINGS); - }, []); + }, [currentView]); const handleBackFromAudioSettings = useCallback(() => { - setCurrentView(BottomSheetView.CONNECTED); - }, []); + setCurrentView(previousView); + }, [previousView]); const renderRoomSelect = () => ( @@ -300,14 +304,12 @@ export const LiveKitBottomSheet = () => { - - + + {t('livekit.title')} - {currentView === BottomSheetView.CONNECTED && ( - - - - )} + + + @@ -323,7 +325,6 @@ const styles = StyleSheet.create({ content: { flex: 1, width: '100%', - paddingHorizontal: 16, }, roomList: { flex: 1, diff --git a/src/components/personnel/personnel-card.tsx b/src/components/personnel/personnel-card.tsx index 9cccf5d..fc4a785 100644 --- 
a/src/components/personnel/personnel-card.tsx +++ b/src/components/personnel/personnel-card.tsx @@ -13,8 +13,6 @@ import { HStack } from '../ui/hstack'; import { Text } from '../ui/text'; import { VStack } from '../ui/vstack'; - - interface PersonnelCardProps { personnel: PersonnelInfoResultData; onPress: (id: string) => void; diff --git a/src/components/settings/audio-device-selection.tsx b/src/components/settings/audio-device-selection.tsx index 9a43305..ff3caf6 100644 --- a/src/components/settings/audio-device-selection.tsx +++ b/src/components/settings/audio-device-selection.tsx @@ -111,9 +111,17 @@ export const AudioDeviceSelection: React.FC = ({ show ); }; - const availableMicrophones = availableAudioDevices.filter((device) => (device.type === 'bluetooth' ? device.isAvailable : true)); - - const availableSpeakers = availableAudioDevices.filter((device) => device.isAvailable); + const availableMicrophones = availableAudioDevices.filter((device) => { + // Microphones include bluetooth, wired, and default input devices + // Specifically exclude devices that are explicitly speakers + return device.type === 'bluetooth' || device.type === 'wired' || device.type === 'default'; + }); + + const availableSpeakers = availableAudioDevices.filter((device) => { + // Speakers include bluetooth, wired, speaker, and default output devices + // Specifically exclude default microphone if it somehow gets tagged as output, though usually default output is 'speaker' or 'default' + return (device.type === 'bluetooth' || device.type === 'wired' || device.type === 'speaker' || device.type === 'default') && device.id !== 'default-mic'; + }); return ( diff --git a/src/stores/app/audio-stream-store.ts b/src/stores/app/audio-stream-store.ts index e919a56..d6d625b 100644 --- a/src/stores/app/audio-stream-store.ts +++ b/src/stores/app/audio-stream-store.ts @@ -32,7 +32,7 @@ interface AudioStreamState { // Stream operations fetchAvailableStreams: () => Promise; playStream: (stream:
DepartmentAudioResultStreamData) => Promise; - stopStream: () => Promise; + stopStream: (clearState?: boolean) => Promise; cleanup: () => Promise; } @@ -79,13 +79,18 @@ export const useAudioStreamStore = create((set, get) => ({ try { const { soundObject: currentSound, stopStream } = get(); - // Stop current stream if playing + // Optimistically set the current stream and loading state + set({ + currentStream: stream, + isLoading: true, + isBuffering: true, + }); + + // Stop current stream if playing, but don't clear the state since we just set it if (currentSound) { - await stopStream(); + await stopStream(false); } - set({ isLoading: true, isBuffering: true }); - logger.debug({ message: 'Starting audio stream', context: { streamName: stream.Name, streamUrl: stream.Url }, @@ -189,12 +194,10 @@ export const useAudioStreamStore = create((set, get) => ({ isLoading: false, isBuffering: false, }); - - } }, - stopStream: async () => { + stopStream: async (clearState = true) => { try { const { soundObject, currentStream } = get(); @@ -208,13 +211,21 @@ export const useAudioStreamStore = create((set, get) => ({ }); } - set({ - soundObject: null, - currentStream: null, - isPlaying: false, - isLoading: false, - isBuffering: false, - }); + if (clearState) { + set({ + soundObject: null, + currentStream: null, + isPlaying: false, + isLoading: false, + isBuffering: false, + }); + } else { + // If not clearing state, just clear the sound object + set({ + soundObject: null, + isPlaying: false, + }); + } } catch (error) { logger.error({ message: 'Failed to stop audio stream', diff --git a/src/stores/app/livekit-store.ts b/src/stores/app/livekit-store.ts index 33e9254..5d80503 100644 --- a/src/stores/app/livekit-store.ts +++ b/src/stores/app/livekit-store.ts @@ -1,5 +1,6 @@ import notifee, { AndroidImportance } from '@notifee/react-native'; import { getRecordingPermissionsAsync, requestRecordingPermissionsAsync } from 'expo-audio'; +import { Audio } from 'expo-av'; import { Room, 
RoomEvent } from 'livekit-client'; import { Platform } from 'react-native'; import { create } from 'zustand'; @@ -12,44 +13,54 @@ import { headsetButtonService } from '../../services/headset-button.service'; import { toggleMicrophone } from '../../utils/microphone-toggle'; import { useBluetoothAudioStore } from './bluetooth-audio-store'; +// Helper function to setup audio routing based on selected devices // Helper function to setup audio routing based on selected devices const setupAudioRouting = async (room: Room): Promise => { try { const bluetoothStore = useBluetoothAudioStore.getState(); - const { selectedAudioDevices, connectedDevice } = bluetoothStore; + const { selectedAudioDevices } = bluetoothStore; + const speaker = selectedAudioDevices.speaker; + const microphone = selectedAudioDevices.microphone; + + logger.info({ + message: 'Setting up audio routing', + context: { + speakerType: speaker?.type, + speakerName: speaker?.name, + micType: microphone?.type, + }, + }); - // If we have a connected Bluetooth device, prioritize it - if (connectedDevice && connectedDevice.hasAudioCapability) { - logger.info({ - message: 'Using Bluetooth device for audio routing', - context: { deviceName: connectedDevice.name }, - }); + if (Platform.OS === 'android' || Platform.OS === 'ios') { + // Default configuration for voice call + const audioModeConfig: any = { + allowsRecordingIOS: true, + staysActiveInBackground: true, + playsInSilentModeIOS: true, + shouldDuckAndroid: true, + // Default to earpiece unless speaker is explicitly selected + playThroughEarpieceAndroid: true, + }; - // Update selected devices to use Bluetooth - const deviceName = connectedDevice.name || 'Bluetooth Device'; - const bluetoothMicrophone = connectedDevice.supportsMicrophoneControl ? 
{ id: connectedDevice.id, name: deviceName, type: 'bluetooth' as const, isAvailable: true } : selectedAudioDevices.microphone; + // If speaker device is selected (explicitly 'speaker' type), force speaker output + if (speaker?.type === 'speaker') { + logger.debug({ message: 'Routing audio to Speakerphone' }); + audioModeConfig.playThroughEarpieceAndroid = false; - const bluetoothSpeaker = { - id: connectedDevice.id, - name: deviceName, - type: 'bluetooth' as const, - isAvailable: true, - }; + // On iOS, we might need to handle this differently if we wanted to force speaker, + // but typically standard routing handles it or AVRoutePickerView is used. + // For Expo AV, we can sometimes influence it. + } else { + logger.debug({ message: 'Routing audio to Earpiece/Headset' }); + audioModeConfig.playThroughEarpieceAndroid = true; + } - bluetoothStore.setSelectedMicrophone(bluetoothMicrophone); - bluetoothStore.setSelectedSpeaker(bluetoothSpeaker); + await Audio.setAudioModeAsync(audioModeConfig); + } - // Note: Actual audio routing would be implemented via native modules - // This is a placeholder for the audio routing logic - logger.debug({ - message: 'Audio routing configured for Bluetooth device', - }); - } else { - // Use default audio devices (selected devices or default) - logger.debug({ - message: 'Using default audio devices', - context: { selectedAudioDevices }, - }); + // Handle LiveKit specific device switching if needed (mostly for web/desktop, but good to have) + if (speaker?.id && speaker.id !== 'default-speaker' && speaker.type === 'bluetooth') { + // logic for specific bluetooth device selection if feasible } } catch (error) { logger.error({ @@ -466,3 +477,13 @@ export const useLiveKitStore = create((set, get) => ({ } }, })); + +// Subscribe to bluetooth store changes to trigger audio routing updates +useBluetoothAudioStore.subscribe((state, prevState) => { + if (state.selectedAudioDevices !== prevState.selectedAudioDevices) { + const room = 
useLiveKitStore.getState().currentRoom; + if (room) { + setupAudioRouting(room); + } + } +});