feat(audio) [closes #230] Screenshare audio control (#248)

* feat(ui): present mic volume icon

* feat(ui): improve mic volume display

* refactor(ui): nest mic audio as a channel

* fix(ui): prevent volume control from reappearing for returning peers

* refactor(audio): update specific audio channel states

* refactor(audio): use enum for audio channel name

* refactor(types): improve audio type names

* feat(audio): wire up screen share audio

* refactor(networking): always provide stream metadata

* fix(audio): remove screen audio when stream ends

* fix(audio): stop audio when removing it

* feat(audio): show appropriate icon for channel

* fix(audio): clean up audio for leaving peers consistently

* fix(audio): use up-to-date peerAudios reference

* refactor(audio): simplify audio state updating

* refactor(audio): use functional setState to update peer list

* refactor(variables): rename peerAudios to peerAudioChannels

* refactor(types): consolidate stream types

* refactor(types): require stream type metadata
Jeremy Kahn, 2024-04-01 21:25:12 -05:00 (committed by GitHub)
parent 89abe718db
commit 05b4615af9
13 changed files with 342 additions and 119 deletions
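
Taken together, the commits above replace the single audioState with a per-peer, per-channel audio model. As a minimal sketch of how a single channel's state gets updated without clobbering the other channel, using the names added to models/chat.ts at the end of this diff (the updateChannelState helper itself is illustrative and not part of the change):

import { Dispatch, SetStateAction } from 'react'
import {
  AudioChannelName,
  AudioState,
  PeerAudioChannelState,
} from 'models/chat'

// Illustrative helper (not part of this change): update one audio channel's
// state without clobbering the other, using the functional setState pattern
// the room hooks below adopt.
export const updateChannelState = (
  setAudioChannelState: Dispatch<SetStateAction<PeerAudioChannelState>>,
  channelName: AudioChannelName,
  audioState: AudioState
) => {
  setAudioChannelState(prevState => ({
    ...prevState,
    [channelName]: audioState,
  }))
}

// Example: mark the local microphone channel as playing.
// updateChannelState(setAudioChannelState, AudioChannelName.MICROPHONE, AudioState.PLAYING)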

View File

@ -1,16 +1,25 @@
import { useState, useEffect } from 'react'
import Slider from '@mui/material/Slider'
import Box from '@mui/material/Box'
import Paper from '@mui/material/Paper'
import ListItemIcon from '@mui/material/ListItemIcon'
import VolumeUp from '@mui/icons-material/VolumeUp'
import VolumeDown from '@mui/icons-material/VolumeDown'
import VolumeMute from '@mui/icons-material/VolumeMute'
import VolumeUpIcon from '@mui/icons-material/VolumeUp'
import VolumeDownIcon from '@mui/icons-material/VolumeDown'
import VolumeMuteIcon from '@mui/icons-material/VolumeMute'
import MicIcon from '@mui/icons-material/Mic'
import LaptopWindowsIcon from '@mui/icons-material/LaptopWindows'
import Tooltip from '@mui/material/Tooltip'
import { AudioChannelName } from 'models/chat'
interface AudioVolumeProps {
audioEl: HTMLAudioElement
audioChannelName: AudioChannelName
}
export const AudioVolume = ({ audioEl }: AudioVolumeProps) => {
export const AudioVolume = ({
audioEl,
audioChannelName,
}: AudioVolumeProps) => {
const [audioVolume, setAudioVolume] = useState(audioEl.volume)
useEffect(() => {
@ -32,27 +41,48 @@ export const AudioVolume = ({ audioEl }: AudioVolumeProps) => {
const formatLabelValue = () => `${Math.round(audioVolume * 100)}%`
let VolumeIcon = VolumeUp
let VolumeIcon = VolumeUpIcon
if (audioVolume === 0) {
VolumeIcon = VolumeMute
VolumeIcon = VolumeMuteIcon
} else if (audioVolume < 0.5) {
VolumeIcon = VolumeDown
VolumeIcon = VolumeDownIcon
}
return (
<Box sx={{ display: 'flex', pt: 1, pr: 3, alignItems: 'center' }}>
<ListItemIcon>
<VolumeIcon sx={{ cursor: 'pointer' }} onClick={handleIconClick} />
<Paper
sx={{
alignItems: 'center',
display: 'flex',
mt: 1.5,
pl: 2,
pr: 3,
py: 1,
}}
>
<ListItemIcon sx={{ cursor: 'pointer' }} onClick={handleIconClick}>
<VolumeIcon fontSize="small" />
{audioChannelName === AudioChannelName.MICROPHONE && (
<Tooltip title="Their microphone volume">
<MicIcon fontSize="small" sx={{ ml: 1, mr: 2 }} />
</Tooltip>
)}
{audioChannelName === AudioChannelName.SCREEN_SHARE && (
<Tooltip title="Their screen's volume">
<LaptopWindowsIcon fontSize="small" sx={{ ml: 1, mr: 2 }} />
</Tooltip>
)}
</ListItemIcon>
<Slider
aria-label="Volume"
getAriaValueText={formatLabelValue}
valueLabelFormat={formatLabelValue}
valueLabelDisplay="auto"
onChange={handleSliderChange}
value={audioVolume * 100}
></Slider>
</Box>
<Box display="flex" width={1}>
<Slider
aria-label="Volume"
getAriaValueText={formatLabelValue}
valueLabelFormat={formatLabelValue}
valueLabelDisplay="auto"
onChange={handleSliderChange}
value={audioVolume * 100}
></Slider>
</Box>
</Paper>
)
}

View File

@ -3,7 +3,7 @@ import Paper from '@mui/material/Paper'
import Tooltip from '@mui/material/Tooltip'
import { PeerNameDisplay } from 'components/PeerNameDisplay'
import { VideoStreamType } from 'models/chat'
import { StreamType } from 'models/chat'
import { SelectedPeerStream } from './RoomVideoDisplay'
@ -13,13 +13,13 @@ interface PeerVideoProps {
numberOfVideos: number
onVideoClick?: (
userId: string,
videoStreamType: VideoStreamType,
streamType: StreamType,
videoStream: MediaStream
) => void
selectedPeerStream: SelectedPeerStream | null
userId: string
videoStream: MediaStream
videoStreamType: VideoStreamType
streamType: StreamType
}
// Adapted from https://www.geeksforgeeks.org/find-the-next-perfect-square-greater-than-a-given-number/
@ -37,7 +37,7 @@ export const PeerVideo = ({
userId,
selectedPeerStream,
videoStream,
videoStreamType,
streamType,
}: PeerVideoProps) => {
const videoRef = useRef<HTMLVideoElement>(null)
@ -47,13 +47,14 @@ export const PeerVideo = ({
video.autoplay = true
video.srcObject = videoStream
video.muted = true
}, [videoRef, videoStream])
const cols = Math.sqrt(nextPerfectSquare(numberOfVideos - 1))
const rows = Math.ceil(numberOfVideos / cols)
const handleVideoClick = () => {
onVideoClick?.(userId, videoStreamType, videoStream)
onVideoClick?.(userId, streamType, videoStream)
}
return (

View File

@ -4,7 +4,7 @@ import Paper from '@mui/material/Paper'
import { RoomContext } from 'contexts/RoomContext'
import { ShellContext } from 'contexts/ShellContext'
import { Peer, VideoStreamType } from 'models/chat'
import { Peer, StreamType } from 'models/chat'
import { PeerVideo } from './PeerVideo'
@ -16,7 +16,7 @@ interface PeerWithVideo {
export interface SelectedPeerStream {
peerId: string
videoStreamType: VideoStreamType
streamType: StreamType
videoStream: MediaStream
}
@ -105,13 +105,13 @@ export const RoomVideoDisplay = ({
const handleVideoClick = (
peerId: string,
videoStreamType: VideoStreamType,
streamType: StreamType,
videoStream: MediaStream
) => {
if (selectedPeerStream?.videoStream === videoStream) {
setSelectedPeerStream(null)
} else if (numberOfVideos > 1) {
setSelectedPeerStream({ peerId, videoStreamType, videoStream })
setSelectedPeerStream({ peerId, streamType, videoStream })
}
}
@ -139,7 +139,7 @@ export const RoomVideoDisplay = ({
userId={selectedPeerStream.peerId}
selectedPeerStream={selectedPeerStream}
videoStream={selectedPeerStream.videoStream}
videoStreamType={selectedPeerStream.videoStreamType}
streamType={selectedPeerStream.streamType}
/>
</Box>
)}
@ -168,7 +168,7 @@ export const RoomVideoDisplay = ({
userId={userId}
selectedPeerStream={selectedPeerStream}
videoStream={selfVideoStream}
videoStreamType={VideoStreamType.WEBCAM}
streamType={StreamType.WEBCAM}
/>
)}
{selfScreenStream && (
@ -179,7 +179,7 @@ export const RoomVideoDisplay = ({
userId={userId}
selectedPeerStream={selectedPeerStream}
videoStream={selfScreenStream}
videoStreamType={VideoStreamType.SCREEN_SHARE}
streamType={StreamType.SCREEN_SHARE}
/>
)}
{peersWithVideo.map(peerWithVideo => (
@ -191,7 +191,7 @@ export const RoomVideoDisplay = ({
userId={peerWithVideo.peer.userId}
selectedPeerStream={selectedPeerStream}
videoStream={peerWithVideo.videoStream}
videoStreamType={VideoStreamType.WEBCAM}
streamType={StreamType.WEBCAM}
/>
)}
{peerWithVideo.screenStream && (
@ -201,7 +201,7 @@ export const RoomVideoDisplay = ({
userId={peerWithVideo.peer.userId}
selectedPeerStream={selectedPeerStream}
videoStream={peerWithVideo.screenStream}
videoStreamType={VideoStreamType.SCREEN_SHARE}
streamType={StreamType.SCREEN_SHARE}
/>
)}
</Fragment>

View File

@ -23,6 +23,7 @@ import {
TypingStatus,
Peer,
PeerVerificationState,
AudioChannelName,
} from 'models/chat'
import { getPeerName, usePeerNameDisplay } from 'components/PeerNameDisplay'
import { Audio } from 'lib/Audio'
@ -269,7 +270,10 @@ export function useRoom(
userId,
publicKey,
customUsername,
audioState: AudioState.STOPPED,
audioChannelState: {
[AudioChannelName.MICROPHONE]: AudioState.STOPPED,
[AudioChannelName.SCREEN_SHARE]: AudioState.STOPPED,
},
videoState: VideoState.STOPPED,
screenShareState: ScreenShareState.NOT_SHARING,
offeredFileId: null,

View File

@ -2,7 +2,13 @@ import { useContext, useEffect, useCallback, useState } from 'react'
import { ShellContext } from 'contexts/ShellContext'
import { PeerActions } from 'models/network'
import { AudioState, Peer } from 'models/chat'
import {
AudioState,
Peer,
AudioChannelName,
PeerAudioChannelState,
StreamType,
} from 'models/chat'
import { PeerRoom, PeerHookType, PeerStreamType } from 'lib/PeerRoom'
interface UseRoomAudioConfig {
@ -19,7 +25,7 @@ export function useRoomAudio({ peerRoom }: UseRoomAudioConfig) {
string | null
>(null)
const { peerList, setPeerList, setAudioState, peerAudios, setPeerAudios } =
const { setPeerList, setAudioChannelState, setPeerAudioChannels } =
shellContext
useEffect(() => {
@ -32,29 +38,46 @@ export function useRoomAudio({ peerRoom }: UseRoomAudioConfig) {
})()
}, [audioStream])
const [sendAudioChange, receiveAudioChange] = peerRoom.makeAction<AudioState>(
PeerActions.AUDIO_CHANGE
)
const [sendAudioChange, receiveAudioChange] = peerRoom.makeAction<
Partial<PeerAudioChannelState>
>(PeerActions.AUDIO_CHANGE)
receiveAudioChange((audioState, peerId) => {
const newPeerList = peerList.map(peer => {
const newPeer: Peer = { ...peer }
receiveAudioChange((peerAudioChannelState, peerId) => {
setPeerList(peerList => {
return peerList.map(peer => {
const newPeer: Peer = { ...peer }
if (peer.peerId === peerId) {
newPeer.audioState = audioState
const microphoneAudioChannel =
peerAudioChannelState[AudioChannelName.MICROPHONE]
if (audioState === AudioState.STOPPED) {
deletePeerAudio(peerId)
if (microphoneAudioChannel) {
if (peer.peerId === peerId) {
newPeer.audioChannelState = {
...newPeer.audioChannelState,
...peerAudioChannelState,
}
if (microphoneAudioChannel === AudioState.STOPPED) {
deletePeerAudio(peerId)
}
}
}
}
return newPeer
return newPeer
})
})
setPeerList(newPeerList)
})
peerRoom.onPeerStream(PeerStreamType.AUDIO, (stream, peerId) => {
peerRoom.onPeerStream(PeerStreamType.AUDIO, (stream, peerId, metadata) => {
if (
typeof metadata === 'object' &&
metadata !== null &&
'type' in metadata &&
metadata.type !== StreamType.MICROPHONE
) {
return
}
const audioTracks = stream.getAudioTracks()
if (audioTracks.length === 0) return
@ -63,7 +86,13 @@ export function useRoomAudio({ peerRoom }: UseRoomAudioConfig) {
audio.srcObject = stream
audio.autoplay = true
setPeerAudios({ ...peerAudios, [peerId]: audio })
setPeerAudioChannels(peerAudioChannels => ({
...peerAudioChannels,
[peerId]: {
...peerAudioChannels[peerId],
[AudioChannelName.MICROPHONE]: audio,
},
}))
})
const cleanupAudio = useCallback(() => {
@ -86,9 +115,19 @@ export function useRoomAudio({ peerRoom }: UseRoomAudioConfig) {
video: false,
})
peerRoom.addStream(newSelfStream)
sendAudioChange(AudioState.PLAYING)
setAudioState(AudioState.PLAYING)
peerRoom.addStream(newSelfStream, null, {
type: StreamType.MICROPHONE,
})
sendAudioChange({
[AudioChannelName.MICROPHONE]: AudioState.PLAYING,
})
setAudioChannelState(prevState => ({
...prevState,
[AudioChannelName.MICROPHONE]: AudioState.PLAYING,
}))
setAudioStream(newSelfStream)
}
} else {
@ -96,8 +135,16 @@ export function useRoomAudio({ peerRoom }: UseRoomAudioConfig) {
cleanupAudio()
peerRoom.removeStream(audioStream, peerRoom.getPeers())
sendAudioChange(AudioState.STOPPED)
setAudioState(AudioState.STOPPED)
sendAudioChange({
[AudioChannelName.MICROPHONE]: AudioState.STOPPED,
})
setAudioChannelState(prevState => ({
...prevState,
[AudioChannelName.MICROPHONE]: AudioState.STOPPED,
}))
setAudioStream(null)
}
}
@ -106,11 +153,10 @@ export function useRoomAudio({ peerRoom }: UseRoomAudioConfig) {
audioStream,
cleanupAudio,
isSpeakingToRoom,
peerAudios,
peerRoom,
selectedAudioDeviceId,
sendAudioChange,
setAudioState,
setAudioChannelState,
])
useEffect(() => {
@ -139,27 +185,45 @@ export function useRoomAudio({ peerRoom }: UseRoomAudioConfig) {
video: false,
})
peerRoom.addStream(newSelfStream)
peerRoom.addStream(newSelfStream, null, {
type: StreamType.MICROPHONE,
})
setAudioStream(newSelfStream)
}
const deletePeerAudio = (peerId: string) => {
const newPeerAudios = { ...peerAudios }
delete newPeerAudios[peerId]
setPeerAudios(newPeerAudios)
setPeerAudioChannels(({ ...newPeerAudios }) => {
if (!newPeerAudios[peerId]) {
return newPeerAudios
}
const microphoneAudio = newPeerAudios[peerId][AudioChannelName.MICROPHONE]
microphoneAudio?.pause()
const { [AudioChannelName.MICROPHONE]: _, ...newPeerAudioChannels } =
newPeerAudios[peerId]
newPeerAudios[peerId] = newPeerAudioChannels
return newPeerAudios
})
}
const handleAudioForNewPeer = (peerId: string) => {
if (audioStream) {
peerRoom.addStream(audioStream, peerId)
peerRoom.addStream(audioStream, peerId, {
type: StreamType.MICROPHONE,
})
}
}
const handleAudioForLeavingPeer = (peerId: string) => {
if (audioStream) {
peerRoom.removeStream(audioStream, peerId)
deletePeerAudio(peerId)
}
deletePeerAudio(peerId)
}
peerRoom.onPeerJoin(PeerHookType.AUDIO, (peerId: string) => {

View File

@ -4,7 +4,13 @@ import { isRecord } from 'lib/type-guards'
import { RoomContext } from 'contexts/RoomContext'
import { ShellContext } from 'contexts/ShellContext'
import { PeerActions } from 'models/network'
import { ScreenShareState, Peer, VideoStreamType } from 'models/chat'
import {
ScreenShareState,
Peer,
StreamType,
AudioChannelName,
AudioState,
} from 'models/chat'
import { PeerRoom, PeerHookType, PeerStreamType } from 'lib/PeerRoom'
interface UseRoomScreenShareConfig {
@ -16,7 +22,13 @@ export function useRoomScreenShare({ peerRoom }: UseRoomScreenShareConfig) {
const roomContext = useContext(RoomContext)
const [isSharingScreen, setIsSharingScreen] = useState(false)
const { peerList, setPeerList, setScreenState } = shellContext
const {
peerList,
setPeerList,
setScreenState,
setAudioChannelState,
setPeerAudioChannels,
} = shellContext
const {
peerScreenStreams,
@ -50,7 +62,7 @@ export function useRoomScreenShare({ peerRoom }: UseRoomScreenShareConfig) {
const isScreenShareStream =
isRecord(metadata) &&
'type' in metadata &&
metadata.type === VideoStreamType.SCREEN_SHARE
metadata.type === StreamType.SCREEN_SHARE
if (!isScreenShareStream) return
@ -58,6 +70,33 @@ export function useRoomScreenShare({ peerRoom }: UseRoomScreenShareConfig) {
...peerScreenStreams,
[peerId]: stream,
})
const [audioStream] = stream.getAudioTracks()
if (audioStream) {
setAudioChannelState(prevState => ({
...prevState,
[AudioChannelName.SCREEN_SHARE]: AudioState.PLAYING,
}))
const audioTracks = stream.getAudioTracks()
if (audioTracks.length > 0) {
const audio = new Audio()
audio.srcObject = stream
audio.autoplay = true
setPeerAudioChannels(peerAudioChannels => {
return {
...peerAudioChannels,
[peerId]: {
...peerAudioChannels[peerId],
[AudioChannelName.SCREEN_SHARE]: audio,
},
}
})
}
}
})
const cleanupScreenStream = useCallback(() => {
@ -78,8 +117,9 @@ export function useRoomScreenShare({ peerRoom }: UseRoomScreenShareConfig) {
})
peerRoom.addStream(displayMedia, null, {
type: VideoStreamType.SCREEN_SHARE,
type: StreamType.SCREEN_SHARE,
})
setSelfScreenStream(displayMedia)
sendScreenShare(ScreenShareState.SHARING)
setScreenState(ScreenShareState.SHARING)
@ -119,15 +159,33 @@ export function useRoomScreenShare({ peerRoom }: UseRoomScreenShareConfig) {
}, [setPeerScreenStreams])
const deletePeerScreen = (peerId: string) => {
const newPeerScreens = { ...peerScreenStreams }
delete newPeerScreens[peerId]
setPeerScreenStreams(newPeerScreens)
setPeerScreenStreams(({ [peerId]: _, ...newPeerScreens }) => {
return newPeerScreens
})
setPeerAudioChannels(({ ...newPeerAudios }) => {
if (!newPeerAudios[peerId]) {
return newPeerAudios
}
const screenShareAudio =
newPeerAudios[peerId][AudioChannelName.SCREEN_SHARE]
screenShareAudio?.pause()
const { [AudioChannelName.SCREEN_SHARE]: _, ...newPeerAudioChannels } =
newPeerAudios[peerId]
newPeerAudios[peerId] = newPeerAudioChannels
return newPeerAudios
})
}
const handleScreenForNewPeer = (peerId: string) => {
if (selfScreenStream) {
peerRoom.addStream(selfScreenStream, peerId, {
type: VideoStreamType.SCREEN_SHARE,
type: StreamType.SCREEN_SHARE,
})
}
}
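
For reference, the detach side of the screen-share audio wiring above mirrors the microphone channel: pause the channel's audio element, then drop it from the peer's channel map. A minimal standalone sketch of that step, assuming the types from models/chat.ts (removePeerAudioChannel is illustrative; the hooks inline this logic in deletePeerAudio and deletePeerScreen):

import { AudioChannel, AudioChannelName } from 'models/chat'

// Illustrative helper (the hooks above inline this logic): remove one audio
// channel for a peer, pausing its element first so playback actually stops.
export const removePeerAudioChannel = (
  peerAudioChannels: Record<string, AudioChannel>,
  peerId: string,
  channelName: AudioChannelName
): Record<string, AudioChannel> => {
  const peerChannels = peerAudioChannels[peerId]

  if (!peerChannels?.[channelName]) return peerAudioChannels

  peerChannels[channelName]?.pause()

  const remainingChannels = { ...peerChannels }
  delete remainingChannels[channelName]

  return { ...peerAudioChannels, [peerId]: remainingChannels }
}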

View File

@ -3,7 +3,7 @@ import { useContext, useEffect, useCallback, useState } from 'react'
import { RoomContext } from 'contexts/RoomContext'
import { ShellContext } from 'contexts/ShellContext'
import { PeerActions } from 'models/network'
import { VideoState, Peer, VideoStreamType } from 'models/chat'
import { VideoState, Peer, StreamType } from 'models/chat'
import { PeerRoom, PeerHookType, PeerStreamType } from 'lib/PeerRoom'
import { isRecord } from 'lib/type-guards'
@ -60,8 +60,9 @@ export function useRoomVideo({ peerRoom }: UseRoomVideoConfig) {
})
peerRoom.addStream(newSelfStream, null, {
type: VideoStreamType.WEBCAM,
type: StreamType.WEBCAM,
})
setSelfVideoStream(newSelfStream)
}
})()
@ -93,7 +94,7 @@ export function useRoomVideo({ peerRoom }: UseRoomVideoConfig) {
const isWebcamStream =
isRecord(metadata) &&
'type' in metadata &&
metadata.type === VideoStreamType.WEBCAM
metadata.type === StreamType.WEBCAM
if (!isWebcamStream) return
@ -124,8 +125,9 @@ export function useRoomVideo({ peerRoom }: UseRoomVideoConfig) {
})
peerRoom.addStream(newSelfStream, null, {
type: VideoStreamType.WEBCAM,
type: StreamType.WEBCAM,
})
sendVideoChange(VideoState.PLAYING)
setVideoState(VideoState.PLAYING)
setSelfVideoStream(newSelfStream)
@ -193,7 +195,7 @@ export function useRoomVideo({ peerRoom }: UseRoomVideoConfig) {
},
})
peerRoom.addStream(newSelfStream, null, { type: VideoStreamType.WEBCAM })
peerRoom.addStream(newSelfStream, null, { type: StreamType.WEBCAM })
setSelfVideoStream(newSelfStream)
}
@ -206,7 +208,7 @@ export function useRoomVideo({ peerRoom }: UseRoomVideoConfig) {
const handleVideoForNewPeer = (peerId: string) => {
if (selfVideoStream) {
peerRoom.addStream(selfVideoStream, peerId, {
type: VideoStreamType.WEBCAM,
type: StreamType.WEBCAM,
})
}
}

View File

@ -9,7 +9,13 @@ import Box from '@mui/material/Box'
import CircularProgress from '@mui/material/CircularProgress'
import { UserInfo } from 'components/UserInfo'
import { AudioState, Peer } from 'models/chat'
import {
AudioState,
Peer,
AudioChannel,
AudioChannelName,
PeerAudioChannelState,
} from 'models/chat'
import { PeerConnectionType } from 'lib/PeerRoom'
import { TrackerConnection } from 'lib/ConnectionTest'
@ -25,8 +31,8 @@ export interface PeerListProps extends PropsWithChildren {
onPeerListClose: () => void
peerList: Peer[]
peerConnectionTypes: Record<string, PeerConnectionType>
audioState: AudioState
peerAudios: Record<string, HTMLAudioElement>
peerAudioChannelState: PeerAudioChannelState
peerAudioChannels: Record<string, AudioChannel>
connectionTestResults: IConnectionTestResults
}
@ -36,8 +42,8 @@ export const PeerList = ({
onPeerListClose,
peerList,
peerConnectionTypes,
audioState,
peerAudios,
peerAudioChannelState,
peerAudioChannels,
connectionTestResults,
}: PeerListProps) => {
return (
@ -49,7 +55,8 @@ export const PeerList = ({
<Divider />
<List>
<ListItem divider={true}>
{audioState === AudioState.PLAYING && (
{peerAudioChannelState[AudioChannelName.MICROPHONE] ===
AudioState.PLAYING && (
<ListItemIcon>
<VolumeUp />
</ListItemIcon>
@ -63,7 +70,7 @@ export const PeerList = ({
key={peer.peerId}
peer={peer}
peerConnectionTypes={peerConnectionTypes}
peerAudios={peerAudios}
peerAudioChannels={peerAudioChannels}
/>
))}
{peerList.length === 0 &&

View File

@ -18,7 +18,12 @@ import EnhancedEncryptionIcon from '@mui/icons-material/EnhancedEncryption'
import { AudioVolume } from 'components/AudioVolume'
import { PeerNameDisplay } from 'components/PeerNameDisplay'
import { PublicKey } from 'components/PublicKey'
import { Peer, PeerVerificationState } from 'models/chat'
import {
Peer,
AudioChannel,
AudioChannelName,
PeerVerificationState,
} from 'models/chat'
import { PeerConnectionType } from 'lib/PeerRoom'
import { PeerDownloadFileButton } from './PeerDownloadFileButton'
@ -26,7 +31,7 @@ import { PeerDownloadFileButton } from './PeerDownloadFileButton'
interface PeerListItemProps {
peer: Peer
peerConnectionTypes: Record<string, PeerConnectionType>
peerAudios: Record<string, HTMLAudioElement>
peerAudioChannels: Record<string, AudioChannel>
}
const verificationStateDisplayMap = {
@ -52,8 +57,8 @@ const iconRightPadding = 1
export const PeerListItem = ({
peer,
peerConnectionTypes,
peerAudios,
}: PeerListItemProps): JSX.Element => {
peerAudioChannels,
}: PeerListItemProps) => {
const [showPeerDialog, setShowPeerDialog] = useState(false)
const hasPeerConnection = peer.peerId in peerConnectionTypes
@ -69,6 +74,11 @@ export const PeerListItem = ({
setShowPeerDialog(false)
}
const microphoneAudio =
peerAudioChannels[peer.peerId]?.[AudioChannelName.MICROPHONE]
const screenShareAudio =
peerAudioChannels[peer.peerId]?.[AudioChannelName.SCREEN_SHARE]
return (
<>
<ListItem key={peer.peerId} divider={true}>
@ -124,8 +134,17 @@ export const PeerListItem = ({
</Box>
<PeerNameDisplay>{peer.userId}</PeerNameDisplay>
</Box>
{peer.peerId in peerAudios && (
<AudioVolume audioEl={peerAudios[peer.peerId]} />
{microphoneAudio && (
<AudioVolume
audioEl={microphoneAudio}
audioChannelName={AudioChannelName.MICROPHONE}
/>
)}
{screenShareAudio && (
<AudioVolume
audioEl={screenShareAudio}
audioChannelName={AudioChannelName.SCREEN_SHARE}
/>
)}
</ListItemText>
</ListItem>

View File

@ -19,7 +19,15 @@ import { useWindowSize } from '@react-hook/window-size'
import { ShellContext } from 'contexts/ShellContext'
import { SettingsContext } from 'contexts/SettingsContext'
import { AlertOptions, QueryParamKeys } from 'models/shell'
import { AudioState, ScreenShareState, VideoState, Peer } from 'models/chat'
import {
AudioState,
ScreenShareState,
VideoState,
Peer,
AudioChannel,
PeerAudioChannelState,
AudioChannelName,
} from 'models/chat'
import { ErrorBoundary } from 'components/ErrorBoundary'
import { PeerConnectionType } from 'lib/PeerRoom'
@ -86,7 +94,11 @@ export const Shell = ({ appNeedsUpdate, children, userPeerId }: ShellProps) => {
Record<string, PeerConnectionType>
>({})
const [tabHasFocus, setTabHasFocus] = useState(true)
const [audioState, setAudioState] = useState<AudioState>(AudioState.STOPPED)
const [audioChannelState, setAudioChannelState] =
useState<PeerAudioChannelState>({
[AudioChannelName.MICROPHONE]: AudioState.STOPPED,
[AudioChannelName.SCREEN_SHARE]: AudioState.STOPPED,
})
const [videoState, setVideoState] = useState<VideoState>(VideoState.STOPPED)
const [screenState, setScreenState] = useState<ScreenShareState>(
ScreenShareState.NOT_SHARING
@ -94,8 +106,8 @@ export const Shell = ({ appNeedsUpdate, children, userPeerId }: ShellProps) => {
const [customUsername, setCustomUsername] = useState(
getUserSettings().customUsername
)
const [peerAudios, setPeerAudios] = useState<
Record<string, HTMLAudioElement>
const [peerAudioChannels, setPeerAudioChannels] = useState<
Record<string, AudioChannel>
>({})
const showAlert = useCallback((message: string, options?: AlertOptions) => {
@ -144,14 +156,14 @@ export const Shell = ({ appNeedsUpdate, children, userPeerId }: ShellProps) => {
setIsServerConnectionFailureDialogOpen,
peerConnectionTypes,
setPeerConnectionTypes,
audioState,
setAudioState,
audioChannelState,
setAudioChannelState,
videoState,
setVideoState,
screenState,
setScreenState,
peerAudios,
setPeerAudios,
peerAudioChannels,
setPeerAudioChannels,
customUsername,
setCustomUsername,
connectionTestResults,
@ -174,14 +186,14 @@ export const Shell = ({ appNeedsUpdate, children, userPeerId }: ShellProps) => {
setShowRoomControls,
setTitle,
showAlert,
audioState,
setAudioState,
audioChannelState,
setAudioChannelState,
videoState,
setVideoState,
screenState,
setScreenState,
peerAudios,
setPeerAudios,
peerAudioChannels,
setPeerAudioChannels,
customUsername,
setCustomUsername,
connectionTestResults,
@ -393,8 +405,8 @@ export const Shell = ({ appNeedsUpdate, children, userPeerId }: ShellProps) => {
onPeerListClose={handlePeerListClick}
peerList={peerList}
peerConnectionTypes={peerConnectionTypes}
audioState={audioState}
peerAudios={peerAudios}
peerAudioChannelState={audioChannelState}
peerAudioChannels={peerAudioChannels}
connectionTestResults={connectionTestResults}
/>
{isEmbedded ? (

View File

@ -1,7 +1,15 @@
import { createContext, Dispatch, SetStateAction } from 'react'
import { AlertOptions } from 'models/shell'
import { AudioState, ScreenShareState, VideoState, Peer } from 'models/chat'
import {
AudioState,
ScreenShareState,
VideoState,
Peer,
AudioChannel,
PeerAudioChannelState,
AudioChannelName,
} from 'models/chat'
import { PeerConnectionType } from 'lib/PeerRoom'
import { ConnectionTestResults } from 'components/Shell/useConnectionTest'
import { TrackerConnection } from 'lib/ConnectionTest'
@ -27,14 +35,14 @@ interface ShellContextProps {
setPeerConnectionTypes: Dispatch<
SetStateAction<Record<string, PeerConnectionType>>
>
audioState: AudioState
setAudioState: Dispatch<SetStateAction<AudioState>>
audioChannelState: PeerAudioChannelState
setAudioChannelState: Dispatch<SetStateAction<PeerAudioChannelState>>
videoState: VideoState
setVideoState: Dispatch<SetStateAction<VideoState>>
screenState: ScreenShareState
setScreenState: Dispatch<SetStateAction<ScreenShareState>>
peerAudios: Record<string, HTMLAudioElement>
setPeerAudios: Dispatch<SetStateAction<Record<string, HTMLAudioElement>>>
peerAudioChannels: Record<string, AudioChannel>
setPeerAudioChannels: Dispatch<SetStateAction<Record<string, AudioChannel>>>
customUsername: string
setCustomUsername: Dispatch<SetStateAction<string>>
connectionTestResults: ConnectionTestResults
@ -60,14 +68,17 @@ export const ShellContext = createContext<ShellContextProps>({
setIsServerConnectionFailureDialogOpen: () => {},
peerConnectionTypes: {},
setPeerConnectionTypes: () => {},
audioState: AudioState.STOPPED,
setAudioState: () => {},
audioChannelState: {
[AudioChannelName.MICROPHONE]: AudioState.STOPPED,
[AudioChannelName.SCREEN_SHARE]: AudioState.STOPPED,
},
setAudioChannelState: () => {},
videoState: VideoState.STOPPED,
setVideoState: () => {},
screenState: ScreenShareState.NOT_SHARING,
setScreenState: () => {},
peerAudios: {},
setPeerAudios: () => {},
peerAudioChannels: {},
setPeerAudioChannels: () => {},
customUsername: '',
setCustomUsername: () => {},
connectionTestResults: {

View File

@ -2,6 +2,7 @@ import { joinRoom, Room, BaseRoomConfig, DataPayload } from 'trystero'
import { RelayConfig } from 'trystero/torrent'
import { sleep } from 'lib/sleep'
import { StreamType } from 'models/chat'
export enum PeerHookType {
NEW_PEER = 'NEW_PEER',
@ -171,12 +172,16 @@ export class PeerRoom {
return this.room.makeAction<T>(namespace)
}
addStream = (...args: Parameters<Room['addStream']>) => {
addStream = (
stream: Parameters<Room['addStream']>[0],
targetPeers: Parameters<Room['addStream']>[1],
metadata: { type: StreamType }
) => {
// New streams need to be added as a delayed queue to prevent race
// conditions on the receiver's end where streams and their metadata get
// mixed up.
this.streamQueue.push(
() => Promise.all(this.room.addStream(...args)),
() => Promise.all(this.room.addStream(stream, targetPeers, metadata)),
() => sleep(streamQueueAddDelay)
)

View File

@ -31,9 +31,10 @@ export enum VideoState {
STOPPED = 'STOPPED',
}
export enum VideoStreamType {
export enum StreamType {
WEBCAM = 'WEBCAM',
SCREEN_SHARE = 'SCREEN_SHARE',
MICROPHONE = 'MICROPHONE',
}
export enum ScreenShareState {
@ -47,12 +48,21 @@ export enum PeerVerificationState {
VERIFIED,
}
export enum AudioChannelName {
MICROPHONE = 'microphone',
SCREEN_SHARE = 'screen-share',
}
export type AudioChannel = Partial<Record<AudioChannelName, HTMLAudioElement>>
export type PeerAudioChannelState = Record<AudioChannelName, AudioState>
export interface Peer {
peerId: string
userId: string
publicKey: CryptoKey
customUsername: string
audioState: AudioState
audioChannelState: PeerAudioChannelState
videoState: VideoState
screenShareState: ScreenShareState
offeredFileId: string | null