import type {
  MessageDecoder,
  MessageEncoder,
  TrackReferenceOrPlaceholder,
  WidgetState,
} from '@livekit/components-react';
import { isTrackReference } from '@livekit/components-react';
import { log } from './logger';
import { isWeb } from './detectMobileBrowser';
import { isEqualTrackRef } from './track-reference';
import { RoomEvent, Track } from 'livekit-client';
import * as React from 'react';
import type { MessageFormatter } from '@livekit/components-react';
import {
  CarouselLayout,
  ConnectionStateToast,
  FocusLayout,
  FocusLayoutContainer,
  GridLayout,
  LayoutContextProvider,
  ParticipantTile,
  RoomAudioRenderer,
} from '@livekit/components-react';
import { useCreateLayoutContext } from '@livekit/components-react';
import { usePinnedTracks, useTracks } from '@livekit/components-react';
import { ControlBar } from '@livekit/components-react';
import { useWarnAboutMissingStyles } from './useWarnAboutMissingStyles';
import { useLocalParticipant } from '@livekit/components-react';

/**
 * @public
 */
export interface VideoConferenceProps extends React.HTMLAttributes<HTMLDivElement> {
  chatMessageFormatter?: MessageFormatter;
  chatMessageEncoder?: MessageEncoder;
  chatMessageDecoder?: MessageDecoder;
  /** @alpha */
  SettingsComponent?: React.ComponentType;
}

/**
 * The `VideoConference` ready-made component is your drop-in solution for a classic video conferencing application.
 * It provides functionality such as focusing on one participant, grid view with pagination to handle large numbers
 * of participants, basic non-persistent chat, screen sharing, and more.
 *
 * @remarks
 * The component is implemented with other LiveKit components like `LayoutContextProvider`,
 * `GridLayout`, `ControlBar`, `FocusLayoutContainer` and `FocusLayout`.
 * You can use these components as a starting point for your own custom video conferencing application.
 *
 * @example
 * ```tsx
 * <LiveKitRoom>
 *   <VideoConference />
 * </LiveKitRoom>
 * ```
 * @public
 */
export function VideoConference({
  chatMessageFormatter,
  chatMessageDecoder,
  chatMessageEncoder,
  SettingsComponent,
  ...props
}: VideoConferenceProps) {
  const [widgetState, setWidgetState] = React.useState<WidgetState>({
    showChat: false,
    unreadMessages: 0,
    showSettings: false,
  });
  const lastAutoFocusedScreenShareTrack = React.useRef<TrackReferenceOrPlaceholder | null>(null);

  // Subscribe to all camera tracks (with placeholders) plus any published screen shares.
  const tracks = useTracks(
    [
      { source: Track.Source.Camera, withPlaceholder: true },
      { source: Track.Source.ScreenShare, withPlaceholder: false },
    ],
    { updateOnlyOn: [RoomEvent.ActiveSpeakersChanged], onlySubscribed: false },
  );

  const widgetUpdate = (state: WidgetState) => {
    log.debug('updating widget state', state);
    setWidgetState(state);
  };

  const layoutContext = useCreateLayoutContext();

  const screenShareTracks = tracks
    .filter(isTrackReference)
    .filter((track) => track.publication.source === Track.Source.ScreenShare);

  const focusTrack = usePinnedTracks(layoutContext)?.[0];
  const carouselTracks = tracks.filter((track) => !isEqualTrackRef(track, focusTrack));

  const { localParticipant } = useLocalParticipant();
  const [isAlwaysListening, setIsAlwaysListening] = React.useState(false);

  const toggleAlwaysListening = () => {
    const newValue = !isAlwaysListening;
    setIsAlwaysListening(newValue);
    handleAlwaysListeningToggle(newValue);
  };

  // Broadcast the always-listening flag to the room on the 'video_context' data topic.
  const handleAlwaysListeningToggle = (newValue: boolean) => {
    if (newValue) {
      console.log('SETTING VIDEO CONTEXT ON');
      const data = new TextEncoder().encode('{VIDEO_CONTEXT_ON}');
      localParticipant.publishData(data, { reliable: true, topic: 'video_context' });
    } else {
      console.log('SETTING VIDEO CONTEXT OFF');
      const data = new TextEncoder().encode('{VIDEO_CONTEXT_OFF}');
      localParticipant.publishData(data, { reliable: true, topic: 'video_context' });
    }
  };

  React.useEffect(() => {
    // If screen share tracks are published, and no pin is set explicitly, auto set the screen share.
    if (
      screenShareTracks.some((track) => track.publication.isSubscribed) &&
      lastAutoFocusedScreenShareTrack.current === null
    ) {
      log.debug('Auto set screen share focus:', { newScreenShareTrack: screenShareTracks[0] });
      layoutContext.pin.dispatch?.({ msg: 'set_pin', trackReference: screenShareTracks[0] });
      lastAutoFocusedScreenShareTrack.current = screenShareTracks[0];
    } else if (
      lastAutoFocusedScreenShareTrack.current &&
      !screenShareTracks.some(
        (track) =>
          track.publication.trackSid ===
          lastAutoFocusedScreenShareTrack.current?.publication?.trackSid,
      )
    ) {
      log.debug('Auto clearing screen share focus.');
      layoutContext.pin.dispatch?.({ msg: 'clear_pin' });
      lastAutoFocusedScreenShareTrack.current = null;
    }
    if (focusTrack && !isTrackReference(focusTrack)) {
      const updatedFocusTrack = tracks.find(
        (tr) =>
          tr.participant.identity === focusTrack.participant.identity &&
          tr.source === focusTrack.source,
      );
      if (updatedFocusTrack !== focusTrack && isTrackReference(updatedFocusTrack)) {
        layoutContext.pin.dispatch?.({ msg: 'set_pin', trackReference: updatedFocusTrack });
      }
    }
  }, [
    screenShareTracks
      .map((ref) => `${ref.publication.trackSid}_${ref.publication.isSubscribed}`)
      .join(),
    focusTrack?.publication?.trackSid,
    tracks,
  ]);

  useWarnAboutMissingStyles();

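  // Render: grid layout by default; when a track is pinned (focusTrack), switch to the
  // focus layout with the remaining tracks in a carousel. The settings menu, when a
  // SettingsComponent is provided, renders in a modal driven by widget state.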
  return (
    <div className="lk-video-conference" {...props}>
      {isWeb() && (
        <LayoutContextProvider value={layoutContext} onWidgetChange={widgetUpdate}>
          <div className="lk-video-conference-inner">
            {!focusTrack ? (
              <div className="lk-grid-layout-wrapper">
                <GridLayout tracks={tracks}>
                  <ParticipantTile />
                </GridLayout>
              </div>
            ) : (
              <div className="lk-focus-layout-wrapper">
                <FocusLayoutContainer>
                  <CarouselLayout tracks={carouselTracks}>
                    <ParticipantTile />
                  </CarouselLayout>
                  {focusTrack && <FocusLayout trackRef={focusTrack} />}
                </FocusLayoutContainer>
              </div>
            )}
            <ControlBar controls={{ chat: false, settings: !!SettingsComponent }} />
          </div>
          {SettingsComponent && (
            <div
              className="lk-settings-menu-modal"
              style={{ display: widgetState.showSettings ? 'block' : 'none' }}
            >
              <SettingsComponent />
            </div>
          )}
        </LayoutContextProvider>
      )}
      <RoomAudioRenderer />
      <ConnectionStateToast />
    </div>
  );
}
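
/*
 * A minimal sketch of the receiving side of the `video_context` data messages published by
 * this component. Illustrative only and not part of this module: it assumes a separate
 * participant (for example a server-side agent built with livekit-client) has joined the
 * same room; the handler below and its control flow are assumptions, not an existing API.
 *
 * ```ts
 * import { Room, RoomEvent } from 'livekit-client';
 *
 * const room = new Room();
 * room.on(RoomEvent.DataReceived, (payload, participant, _kind, topic) => {
 *   if (topic !== 'video_context') return;
 *   const message = new TextDecoder().decode(payload);
 *   if (message === '{VIDEO_CONTEXT_ON}') {
 *     // start using `participant`'s video as context
 *   } else if (message === '{VIDEO_CONTEXT_OFF}') {
 *     // stop using the video as context
 *   }
 * });
 * ```
 */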