Enable screen sharing

For now, only video is transmitted: screen sharing is requested with
`audio: false`, so screen audio is not captured.
The camera is turned off while screen sharing, just like in the desktop
app.

In CallProvider, replace the `isVideoOn` boolean state with
`videoStatus`, which toggles between off, camera, and screen share.
Add an icon for when screen sharing is active.
Remove the screen-share button's expand-menu options (share screen,
window, screen area, and file), as they are unavailable on the web.

Change-Id: I95dd1c18efd91ab8b9063f0e7f817839e4f24aba
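
Conceptually, the new video state is a three-way toggle. Below is a
minimal sketch of the intended transitions (illustration only, not part
of the patch; the `toggleVideoSource` helper is hypothetical, while the
real button handlers in CallButtons.tsx inline the same logic through
`updateVideoStatus`):

    // Three mutually exclusive video sources, mirroring the enum added
    // in CallProvider.tsx.
    enum VideoStatus {
      Off,
      Camera,
      ScreenShare,
    }

    // Hypothetical helper: pressing the camera or screen-share button
    // selects that source, or turns video off if it was already active.
    function toggleVideoSource(
      current: VideoStatus,
      pressed: VideoStatus.Camera | VideoStatus.ScreenShare
    ): VideoStatus {
      return current === pressed ? VideoStatus.Off : pressed;
    }

    // toggleVideoSource(VideoStatus.Off, VideoStatus.ScreenShare)    -> ScreenShare
    // toggleVideoSource(VideoStatus.ScreenShare, VideoStatus.Camera) -> Camera
    // toggleVideoSource(VideoStatus.Camera, VideoStatus.Camera)      -> Off
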
diff --git a/client/src/components/CallButtons.tsx b/client/src/components/CallButtons.tsx
index b77fd5a..db0f530 100644
--- a/client/src/components/CallButtons.tsx
+++ b/client/src/components/CallButtons.tsx
@@ -19,9 +19,8 @@
 import { IconButton, IconButtonProps, PaletteColor } from '@mui/material';
 import { styled, Theme } from '@mui/material/styles';
 import { ChangeEvent, useContext, useMemo } from 'react';
-import { useTranslation } from 'react-i18next';
 
-import { CallContext, CallStatus } from '../contexts/CallProvider';
+import { CallContext, CallStatus, VideoStatus } from '../contexts/CallProvider';
 import {
   ExpandableButton,
   ExpandableButtonProps,
@@ -33,7 +32,6 @@
   CallEndIcon,
   ChatBubbleIcon,
   ExtensionIcon,
-  FileIcon,
   FullScreenIcon,
   GroupAddIcon,
   MicroIcon,
@@ -43,12 +41,10 @@
   RecordingIcon,
   RoundCloseIcon,
   ScreenShareArrowIcon,
-  ScreenShareRegularIcon,
-  ScreenShareScreenAreaIcon,
+  ScreenShareStopIcon,
   VideoCameraIcon,
   VideoCameraOffIcon,
   VolumeIcon,
-  WindowIcon,
 } from './SvgIcon';
 
 const CallButton = styled((props: ExpandableButtonProps) => {
@@ -151,30 +147,22 @@
 };
 
 export const CallingScreenShareButton = (props: ExpandableButtonProps) => {
-  const { t } = useTranslation();
   return (
-    <CallButton
-      aria-label="screen share"
-      expandMenuOnClick
-      Icon={ScreenShareArrowIcon}
-      expandMenuOptions={[
-        {
-          description: t('share_screen'),
-          icon: <ScreenShareRegularIcon />,
-        },
-        {
-          description: t('share_window'),
-          icon: <WindowIcon />,
-        },
-        {
-          description: t('share_screen_area'),
-          icon: <ScreenShareScreenAreaIcon />,
-        },
-        {
-          description: t('share_file'),
-          icon: <FileIcon />,
-        },
-      ]}
+    <CallButton aria-label="screen share" expandMenuOnClick IconButtonComp={ToggleScreenShareIconButton} {...props} />
+  );
+};
+
+const ToggleScreenShareIconButton = (props: IconButtonProps) => {
+  const { videoStatus, updateVideoStatus } = useContext(CallContext);
+
+  return (
+    <ToggleIconButton
+      IconOff={ScreenShareArrowIcon}
+      IconOn={ScreenShareStopIcon}
+      selected={videoStatus === VideoStatus.ScreenShare}
+      toggle={() => {
+        updateVideoStatus((v) => (v !== VideoStatus.ScreenShare ? VideoStatus.ScreenShare : VideoStatus.Off));
+      }}
       {...props}
     />
   );
@@ -262,13 +250,15 @@
 };
 
 const ToggleVideoCameraIconButton = (props: IconButtonProps) => {
-  const { isVideoOn, setIsVideoOn } = useContext(CallContext);
+  const { videoStatus, updateVideoStatus } = useContext(CallContext);
   return (
     <ToggleIconButton
       IconOn={VideoCameraIcon}
       IconOff={VideoCameraOffIcon}
-      selected={isVideoOn}
-      toggle={() => setIsVideoOn((v) => !v)}
+      selected={videoStatus === VideoStatus.Camera}
+      toggle={() => {
+        updateVideoStatus((v) => (v !== VideoStatus.Camera ? VideoStatus.Camera : VideoStatus.Off));
+      }}
       {...props}
     />
   );
diff --git a/client/src/components/SvgIcon.tsx b/client/src/components/SvgIcon.tsx
index 8f83587..1ef5f9d 100644
--- a/client/src/components/SvgIcon.tsx
+++ b/client/src/components/SvgIcon.tsx
@@ -721,6 +721,40 @@
   );
 };
 
+export const ScreenShareStopIcon = (props: SvgIconProps) => {
+  return (
+    <SvgIcon {...props} viewBox="0 0 24 24">
+      <path id="Path" d="M12.6,8.9V7.8l2.6,2.2l-2.6,2.2V11c-3.4,0.1-4.2,2.6-4.2,2.6C8.5,9.6,11.7,9,12.6,8.9z" />
+      <g id="Icons_Outline">
+        <g id="Laptop_Black_24dp">
+          <g transform="translate(2.000000, 5.000000)">
+            <g id="Shape">
+              <path
+                d="M17,12H2.8c-0.5,0-0.9-0.2-1.2-0.5C1.2,11.1,1,10.7,1,10.2V1.3c0-0.5,0.2-0.9,0.5-1.2s0.8-0.5,1.2-0.5H17
+          c0.5,0,0.9,0.2,1.2,0.5c0.3,0.3,0.5,0.8,0.5,1.2v8.9c0,0.5-0.2,0.9-0.5,1.2S17.5,12,17,12z M2.8,0.9C2.7,0.9,2.6,1,2.5,1
+          c0,0.1-0.1,0.2-0.1,0.3v8.9c0,0.1,0,0.2,0.1,0.3c0,0,0.1,0.1,0.3,0.1H17c0.1,0,0.2-0.1,0.3-0.1c0.1-0.1,0.1-0.2,0.1-0.3V1.3
+          c0-0.1,0-0.2-0.1-0.3c0,0-0.1-0.1-0.3-0.1C17,0.9,2.8,0.9,2.8,0.9z"
+              />
+            </g>
+            <g id="Line-2">
+              <path d="M19.5,14.4h-19c-0.4,0-0.7-0.3-0.7-0.7S0.1,13,0.5,13h19c0.4,0,0.7,0.3,0.7,0.7S19.9,14.4,19.5,14.4z" />
+            </g>
+          </g>
+        </g>
+      </g>
+      <g>
+        <rect
+          x="2.9"
+          y="10.3"
+          transform="matrix(0.8327 -0.5537 0.5537 0.8327 -4.0847 8.3949)"
+          width="17.9"
+          height="1.4"
+        />
+      </g>
+    </SvgIcon>
+  );
+};
+
 export const TrashBinIcon = (props: SvgIconProps) => {
   return (
     <SvgIcon {...props} viewBox="0 0 15.44 16">
diff --git a/client/src/contexts/CallProvider.tsx b/client/src/contexts/CallProvider.tsx
index 39aad07..5464228 100644
--- a/client/src/contexts/CallProvider.tsx
+++ b/client/src/contexts/CallProvider.tsx
@@ -24,7 +24,7 @@
 import CallPermissionDenied from '../pages/CallPermissionDenied';
 import { CallRouteParams } from '../router';
 import { callTimeoutMs } from '../utils/constants';
-import { SetState, WithChildren } from '../utils/utils';
+import { AsyncSetState, SetState, WithChildren } from '../utils/utils';
 import { useConversationContext } from './ConversationProvider';
 import { MediaDevicesInfo, MediaInputKind, WebRtcContext } from './WebRtcProvider';
 import { IWebSocketContext, WebSocketContext } from './WebSocketProvider';
@@ -40,6 +40,12 @@
   PermissionsDenied,
 }
 
+export enum VideoStatus {
+  Off,
+  Camera,
+  ScreenShare,
+}
+
 type MediaDeviceIdState = {
   id: string | undefined;
   setId: (id: string | undefined) => void | Promise<void>;
@@ -52,8 +58,8 @@
 
   isAudioOn: boolean;
   setIsAudioOn: SetState<boolean>;
-  isVideoOn: boolean;
-  setIsVideoOn: SetState<boolean>;
+  videoStatus: VideoStatus;
+  updateVideoStatus: AsyncSetState<VideoStatus>;
   isChatShown: boolean;
   setIsChatShown: SetState<boolean>;
   isFullscreen: boolean;
@@ -89,8 +95,8 @@
 
   isAudioOn: false,
   setIsAudioOn: () => {},
-  isVideoOn: false,
-  setIsVideoOn: () => {},
+  videoStatus: VideoStatus.Off,
+  updateVideoStatus: () => Promise.reject(),
   isChatShown: false,
   setIsChatShown: () => {},
   isFullscreen: false,
@@ -122,8 +128,15 @@
   webSocket: IWebSocketContext;
 }) => {
   const { state: routeState } = useUrlParams<CallRouteParams>();
-  const { localStream, sendWebRtcOffer, iceConnectionState, closeConnection, getMediaDevices, updateLocalStream } =
-    useContext(WebRtcContext);
+  const {
+    localStream,
+    updateScreenShare,
+    sendWebRtcOffer,
+    iceConnectionState,
+    closeConnection,
+    getMediaDevices,
+    updateLocalStream,
+  } = useContext(WebRtcContext);
   const { conversationId, conversation } = useConversationContext();
   const navigate = useNavigate();
 
@@ -133,7 +146,7 @@
   const [videoDeviceId, setVideoDeviceId] = useState<string>();
 
   const [isAudioOn, setIsAudioOn] = useState(false);
-  const [isVideoOn, setIsVideoOn] = useState(false);
+  const [videoStatus, setVideoStatus] = useState(VideoStatus.Off);
   const [isChatShown, setIsChatShown] = useState(false);
   const [isFullscreen, setIsFullscreen] = useState(false);
   const [callStatus, setCallStatus] = useState(routeState?.callStatus);
@@ -187,14 +200,35 @@
   useEffect(() => {
     if (localStream) {
       for (const track of localStream.getVideoTracks()) {
-        track.enabled = isVideoOn;
+        track.enabled = videoStatus === VideoStatus.Camera;
         const deviceId = track.getSettings().deviceId;
         if (deviceId) {
           setVideoDeviceId(deviceId);
         }
       }
     }
-  }, [isVideoOn, localStream]);
+  }, [videoStatus, localStream]);
+
+  const updateVideoStatus = useCallback(
+    async (newStatus: ((prevState: VideoStatus) => VideoStatus) | VideoStatus) => {
+      if (typeof newStatus === 'function') {
+        newStatus = newStatus(videoStatus);
+      }
+
+      const stream = await updateScreenShare(newStatus === VideoStatus.ScreenShare);
+      if (stream) {
+        for (const track of stream.getTracks()) {
+          track.addEventListener('ended', () => {
+            console.warn('Browser ended screen sharing');
+            updateVideoStatus(VideoStatus.Off);
+          });
+        }
+      }
+
+      setVideoStatus(newStatus);
+    },
+    [videoStatus, updateScreenShare]
+  );
 
   useEffect(() => {
     const onFullscreenChange = () => {
@@ -220,7 +254,7 @@
           };
 
           setCallStatus(CallStatus.Ringing);
-          setIsVideoOn(withVideoOn);
+          setVideoStatus(withVideoOn ? VideoStatus.Camera : VideoStatus.Off);
           console.info('Sending CallBegin', callBegin);
           webSocket.send(WebSocketMessageType.CallBegin, callBegin);
         })
@@ -241,7 +275,7 @@
             conversationId,
           };
 
-          setIsVideoOn(withVideoOn);
+          setVideoStatus(withVideoOn ? VideoStatus.Camera : VideoStatus.Off);
           setCallStatus(CallStatus.Connecting);
           console.info('Sending CallAccept', callAccept);
           webSocket.send(WebSocketMessageType.CallAccept, callAccept);
@@ -324,7 +358,7 @@
       console.info('ICE connection disconnected or failed, ending call');
       endCall();
     }
-  }, [iceConnectionState, callStatus, isVideoOn, endCall]);
+  }, [iceConnectionState, callStatus, videoStatus, endCall]);
 
   useEffect(() => {
     const checkStatusTimeout = () => {
@@ -386,8 +420,8 @@
         currentMediaDeviceIds,
         isAudioOn,
         setIsAudioOn,
-        isVideoOn,
-        setIsVideoOn,
+        videoStatus,
+        updateVideoStatus,
         isChatShown,
         setIsChatShown,
         isFullscreen,
diff --git a/client/src/contexts/WebRtcProvider.tsx b/client/src/contexts/WebRtcProvider.tsx
index 59999b9..1ce3ff6 100644
--- a/client/src/contexts/WebRtcProvider.tsx
+++ b/client/src/contexts/WebRtcProvider.tsx
@@ -33,9 +33,11 @@
   iceConnectionState: RTCIceConnectionState | undefined;
 
   localStream: MediaStream | undefined;
+  screenShareLocalStream: MediaStream | undefined;
   remoteStreams: readonly MediaStream[] | undefined;
   getMediaDevices: () => Promise<MediaDevicesInfo>;
   updateLocalStream: (mediaDeviceIds?: MediaInputIds) => Promise<void>;
+  updateScreenShare: (active: boolean) => Promise<MediaStream | undefined>;
 
   sendWebRtcOffer: () => Promise<void>;
   closeConnection: () => void;
@@ -44,9 +46,11 @@
 const defaultWebRtcContext: IWebRtcContext = {
   iceConnectionState: undefined,
   localStream: undefined,
+  screenShareLocalStream: undefined,
   remoteStreams: undefined,
   getMediaDevices: async () => Promise.reject(),
   updateLocalStream: async () => Promise.reject(),
+  updateScreenShare: async () => Promise.reject(),
   sendWebRtcOffer: async () => Promise.reject(),
   closeConnection: () => {},
 };
@@ -101,6 +105,7 @@
 }) => {
   const { conversation, conversationId } = useConversationContext();
   const [localStream, setLocalStream] = useState<MediaStream>();
+  const [screenShareLocalStream, setScreenShareLocalStream] = useState<MediaStream>();
   const [remoteStreams, setRemoteStreams] = useState<readonly MediaStream[]>();
   const [iceConnectionState, setIceConnectionState] = useState<RTCIceConnectionState | undefined>();
 
@@ -183,14 +188,37 @@
     [getMediaDevices]
   );
 
+  const updateScreenShare = useCallback(
+    async (isOn: boolean) => {
+      if (isOn) {
+        const stream = await navigator.mediaDevices.getDisplayMedia({
+          video: true,
+          audio: false,
+        });
+
+        setScreenShareLocalStream(stream);
+        return stream;
+      } else {
+        if (screenShareLocalStream) {
+          for (const track of screenShareLocalStream.getTracks()) {
+            track.stop();
+          }
+        }
+
+        setScreenShareLocalStream(undefined);
+      }
+    },
+    [screenShareLocalStream]
+  );
+
   useEffect(() => {
-    if (!localStream || !webRtcConnection) {
+    if ((!localStream && !screenShareLocalStream) || !webRtcConnection) {
       return;
     }
 
-    const updateTracks = async (kind: 'audio' | 'video') => {
+    const updateTracks = async (stream: MediaStream, kind: 'audio' | 'video') => {
       const senders = kind === 'audio' ? audioRtcRtpSenders : videoRtcRtpSenders;
-      const tracks = kind === 'audio' ? localStream.getAudioTracks() : localStream.getVideoTracks();
+      const tracks = kind === 'audio' ? stream.getAudioTracks() : stream.getVideoTracks();
       if (senders) {
         const promises: Promise<void>[] = [];
         for (let i = 0; i < senders.length; i++) {
@@ -210,7 +238,7 @@
       // TODO: Currently, we do not support adding new devices. To enable this feature, we would need to implement
       //       the "Perfect negotiation" pattern to renegotiate after `addTrack`.
       //       https://blog.mozilla.org/webrtc/perfect-negotiation-in-webrtc/
-      const newSenders = tracks.map((track) => webRtcConnection.addTrack(track, localStream));
+      const newSenders = tracks.map((track) => webRtcConnection.addTrack(track, stream));
       if (kind === 'audio') {
         setAudioRtcRtpSenders(newSenders);
       } else {
@@ -218,9 +246,15 @@
       }
     };
 
-    updateTracks('audio');
-    updateTracks('video');
-  }, [localStream, webRtcConnection, audioRtcRtpSenders, videoRtcRtpSenders]);
+    if (localStream) {
+      updateTracks(localStream, 'audio');
+      updateTracks(localStream, 'video');
+    }
+
+    if (screenShareLocalStream) {
+      updateTracks(screenShareLocalStream, 'video');
+    }
+  }, [localStream, screenShareLocalStream, webRtcConnection, audioRtcRtpSenders, videoRtcRtpSenders]);
 
   const sendWebRtcOffer = useCallback(async () => {
     const sdp = await webRtcConnection.createOffer({
@@ -375,24 +409,35 @@
   }, [webRtcConnection]);
 
   const closeConnection = useCallback(() => {
-    const localTracks = localStream?.getTracks();
-    if (localTracks) {
-      for (const track of localTracks) {
-        track.stop();
+    const stopStream = (stream: MediaStream) => {
+      const localTracks = stream.getTracks();
+      if (localTracks) {
+        for (const track of localTracks) {
+          track.stop();
+        }
       }
+    };
+
+    if (localStream) {
+      stopStream(localStream);
+    }
+    if (screenShareLocalStream) {
+      stopStream(screenShareLocalStream);
     }
 
     webRtcConnection.close();
-  }, [webRtcConnection, localStream]);
+  }, [webRtcConnection, localStream, screenShareLocalStream]);
 
   return (
     <WebRtcContext.Provider
       value={{
         iceConnectionState,
         localStream,
+        screenShareLocalStream,
         remoteStreams,
         getMediaDevices,
         updateLocalStream,
+        updateScreenShare,
         sendWebRtcOffer,
         closeConnection,
       }}
diff --git a/client/src/pages/CallInterface.tsx b/client/src/pages/CallInterface.tsx
index f21c49c..10b1d3c 100644
--- a/client/src/pages/CallInterface.tsx
+++ b/client/src/pages/CallInterface.tsx
@@ -46,7 +46,7 @@
   CallingVolumeButton,
 } from '../components/CallButtons';
 import CallChatDrawer from '../components/CallChatDrawer';
-import { CallContext, CallStatus } from '../contexts/CallProvider';
+import { CallContext, CallStatus, VideoStatus } from '../contexts/CallProvider';
 import { useConversationContext } from '../contexts/ConversationProvider';
 import { WebRtcContext } from '../contexts/WebRtcProvider';
 import { VideoElementWithSinkId } from '../utils/utils';
@@ -85,24 +85,16 @@
 }
 
 const CallInterface = () => {
-  const { localStream, remoteStreams } = useContext(WebRtcContext);
+  const { remoteStreams } = useContext(WebRtcContext);
   const {
-    isVideoOn,
     currentMediaDeviceIds: {
       audiooutput: { id: audioOutDeviceId },
     },
   } = useContext(CallContext);
-  const localVideoRef = useRef<VideoElementWithSinkId | null>(null);
   const remoteVideoRef = useRef<VideoElementWithSinkId | null>(null);
   const gridItemRef = useRef<HTMLDivElement | null>(null);
 
   useEffect(() => {
-    if (localStream && localVideoRef.current) {
-      localVideoRef.current.srcObject = localStream;
-    }
-  }, [localStream, localVideoRef]);
-
-  useEffect(() => {
     // TODO: For now, `remoteStream` is the first remote stream in the array.
     //       There should only be one in the array, but we should make sure this is right.
     const remoteStream = remoteStreams?.at(0);
@@ -136,21 +128,15 @@
         {/* Guest video, takes the whole screen */}
         <CallInterfaceInformation />
         <Box flexGrow={1} marginY={2} position="relative">
-          <Draggable bounds="parent" nodeRef={localVideoRef ?? undefined}>
-            <video
-              ref={localVideoRef}
-              autoPlay
-              muted
-              style={{
-                position: 'absolute',
-                right: 0,
-                borderRadius: '12px',
-                maxHeight: '50%',
-                maxWidth: '50%',
-                visibility: isVideoOn ? 'visible' : 'hidden',
-              }}
-            />
-          </Draggable>
+          <Box
+            sx={{
+              position: 'absolute',
+              width: '100%',
+              height: '100%',
+            }}
+          >
+            <LocalVideo />
+          </Box>
         </Box>
         <Grid container>
           <Grid item xs />
@@ -168,6 +154,45 @@
   );
 };
 
+const LocalVideo = () => {
+  const { localStream, screenShareLocalStream } = useContext(WebRtcContext);
+  const { videoStatus } = useContext(CallContext);
+  const videoRef = useRef<VideoElementWithSinkId | null>(null);
+
+  const stream = useMemo(() => {
+    switch (videoStatus) {
+      case VideoStatus.Camera:
+        return localStream;
+      case VideoStatus.ScreenShare:
+        return screenShareLocalStream;
+    }
+  }, [videoStatus, localStream, screenShareLocalStream]);
+
+  useEffect(() => {
+    if (stream && videoRef.current) {
+      videoRef.current.srcObject = stream;
+    }
+  }, [stream, videoRef]);
+
+  return (
+    <Draggable bounds="parent" nodeRef={videoRef ?? undefined}>
+      <video
+        ref={videoRef}
+        autoPlay
+        muted
+        style={{
+          position: 'absolute',
+          borderRadius: '12px',
+          maxHeight: '50%',
+          maxWidth: '50%',
+          right: 0,
+          visibility: stream ? 'visible' : 'hidden',
+        }}
+      />
+    </Draggable>
+  );
+};
+
 const formatElapsedSeconds = (elapsedSeconds: number): string => {
   const seconds = Math.floor(elapsedSeconds % 60);
   elapsedSeconds = Math.floor(elapsedSeconds / 60);
diff --git a/client/src/utils/utils.ts b/client/src/utils/utils.ts
index 770c7a8..d51fa1b 100644
--- a/client/src/utils/utils.ts
+++ b/client/src/utils/utils.ts
@@ -21,7 +21,9 @@
   children: ReactNode;
 };
 
+type AsyncDispatch<A> = (value: A) => Promise<void>;
 export type SetState<T> = Dispatch<SetStateAction<T>>;
+export type AsyncSetState<T> = AsyncDispatch<SetStateAction<T>>;
 
 /**
  * HTMLVideoElement with the `sinkId` and `setSinkId` optional properties.