Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Change Media Devices Mid-call #1977

Closed
wants to merge 7 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions spec/unit/webrtc/call.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,12 @@ class MockMediaStream {
addEventListener() {}
}

class MockMediaDeviceInfo {
    // Real MediaDeviceInfo.kind values are "audioinput" / "videoinput" /
    // "audiooutput". MediaHandler.hasAudioDevice()/hasVideoDevice() filter on
    // "audioinput"/"videoinput", so the mock must expose those exact strings;
    // the previous plain "audio"/"video" values never matched the filter.
    public readonly kind: "audioinput" | "videoinput";

    // Keep accepting the short "audio"/"video" form so existing call sites
    // (e.g. the enumerateDevices mock) need no change.
    constructor(kind: "audio" | "video") {
        this.kind = kind === "audio" ? "audioinput" : "videoinput";
    }
}

describe('Call', function() {
let client;
let call;
Expand All @@ -110,6 +116,8 @@ describe('Call', function() {
mediaDevices: {
// @ts-ignore Mock
getUserMedia: () => new MockMediaStream("local_stream"),
// @ts-ignore Mock
enumerateDevices: async () => [new MockMediaDeviceInfo("audio"), new MockMediaDeviceInfo("video")],
},
};

Expand Down
2 changes: 1 addition & 1 deletion src/client.ts
Original file line number Diff line number Diff line change
Expand Up @@ -743,7 +743,7 @@ export class MatrixClient extends EventEmitter {
protected checkTurnServersIntervalID: number;
protected exportedOlmDeviceToImport: IOlmDevice;
protected txnCtr = 0;
protected mediaHandler = new MediaHandler();
protected mediaHandler = new MediaHandler(this);

constructor(opts: IMatrixClientCreateOpts) {
super();
Expand Down
87 changes: 81 additions & 6 deletions src/webrtc/call.ts
Original file line number Diff line number Diff line change
Expand Up @@ -562,8 +562,8 @@ export class MatrixCall extends EventEmitter {
this.feeds.push(new CallFeed({
client: this.client,
roomId: this.roomId,
audioMuted: false,
videoMuted: false,
audioMuted: stream.getAudioTracks().length === 0,
videoMuted: stream.getVideoTracks().length === 0,
userId,
stream,
purpose,
Expand Down Expand Up @@ -752,19 +752,30 @@ export class MatrixCall extends EventEmitter {
logger.debug(`Answering call ${this.callId}`);

if (!this.localUsermediaStream && !this.waitForLocalAVStream) {
const prevState = this.state;
const answerWithAudio = this.shouldAnswerWithMediaType(audio, this.hasRemoteUserMediaAudioTrack, "audio");
const answerWithVideo = this.shouldAnswerWithMediaType(video, this.hasRemoteUserMediaVideoTrack, "video");

this.setState(CallState.WaitLocalMedia);
this.waitForLocalAVStream = true;

try {
const mediaStream = await this.client.getMediaHandler().getUserMediaStream(
this.shouldAnswerWithMediaType(audio, this.hasRemoteUserMediaAudioTrack, "audio"),
this.shouldAnswerWithMediaType(video, this.hasRemoteUserMediaVideoTrack, "video"),
answerWithAudio, answerWithVideo,
);
this.waitForLocalAVStream = false;
this.gotUserMediaForAnswer(mediaStream);
} catch (e) {
this.getUserMediaFailed(e);
return;
if (answerWithVideo) {
// Try to answer without video
logger.warn("Failed to getUserMedia(), trying to getUserMedia() without video");
this.setState(prevState);
this.waitForLocalAVStream = false;
await this.answer(answerWithAudio, false);
} else {
this.getUserMediaFailed(e);
return;
}
}
} else if (this.waitForLocalAVStream) {
this.setState(CallState.WaitLocalMedia);
Expand Down Expand Up @@ -987,12 +998,72 @@ export class MatrixCall extends EventEmitter {
}
}

/**
 * Re-acquires the local usermedia stream so the currently configured media
 * devices (set via MediaHandler.setAudioInput/setVideoInput) take effect
 * mid-call, then swaps the new tracks into the peer connection and releases
 * the previous stream.
 */
public async updateLocalUsermediaStream() {
    const oldStream = this.localUsermediaStream;

    // forceNewStream = true: do not let the MediaHandler hand back a clone
    // of a stream that was captured from the old devices.
    const stream = await this.client.getMediaHandler().getUserMediaStream(
        this.hasLocalUserMediaAudioTrack,
        this.hasLocalUserMediaVideoTrack,
        true,
    );

    const callFeed = this.localUsermediaFeed;
    callFeed.setNewStream(stream);

    const newSenders = [];

    for (const track of stream.getTracks()) {
        // Prefer reusing the sender already transmitting this kind of track.
        const oldSender = this.usermediaSenders.find((sender) => {
            return sender.track?.kind === track.kind;
        });

        let newSender: RTCRtpSender;

        try {
            logger.info(
                `Replacing track (` +
                `id="${track.id}", ` +
                `kind="${track.kind}", ` +
                // Fixed: previously interpolated the MediaStream object
                // itself, which logs "[object MediaStream]" instead of an id.
                `streamId="${stream.id}", ` +
                `streamPurpose="${callFeed.purpose}"` +
                `) to peer connection`,
            );
            // Throws/rejects when there is no matching sender (oldSender is
            // undefined) or the track cannot be swapped in place — in either
            // case fall through to adding a brand-new sender.
            await oldSender.replaceTrack(track);
            newSender = oldSender;
        } catch (error) {
            logger.info(
                `Adding track (` +
                `id="${track.id}", ` +
                `kind="${track.kind}", ` +
                `streamId="${stream.id}", ` +
                `streamPurpose="${callFeed.purpose}"` +
                `) to peer connection`,
            );
            newSender = this.peerConn.addTrack(track, stream);
        }

        newSenders.push(newSender);
    }

    this.usermediaSenders = newSenders;

    // Release the capture devices held by the superseded stream.
    this.client.getMediaHandler().stopUserMediaStream(oldStream);
}

/**
* Set whether our outbound video should be muted or not.
* @param {boolean} muted True to mute the outbound video.
* @returns the new mute state
*/
public async setLocalVideoMuted(muted: boolean): Promise<boolean> {
if (!await this.client.getMediaHandler().hasVideoDevice()) {
return this.isLocalVideoMuted();
}

if (!this.hasLocalUserMediaVideoTrack && !muted) {
await this.upgradeCall(false, true);
return this.isLocalVideoMuted();
Expand Down Expand Up @@ -1021,6 +1092,10 @@ export class MatrixCall extends EventEmitter {
* @returns the new mute state
*/
public async setMicrophoneMuted(muted: boolean): Promise<boolean> {
if (!await this.client.getMediaHandler().hasAudioDevice()) {
return this.isMicrophoneMuted();
}

if (!this.hasLocalUserMediaAudioTrack && !muted) {
await this.upgradeCall(true, false);
return this.isMicrophoneMuted();
Expand Down
39 changes: 32 additions & 7 deletions src/webrtc/mediaHandler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ See the License for the specific language governing permissions and
limitations under the License.
*/

import { MatrixClient } from "../client";
import { logger } from "../logger";

export class MediaHandler {
Expand All @@ -25,41 +26,65 @@ export class MediaHandler {
private userMediaStreams: MediaStream[] = [];
private screensharingStreams: MediaStream[] = [];

constructor(private client: MatrixClient) {}

/**
* Set an audio input device to use for MatrixCalls
* @param {string} deviceId the identifier for the device
* undefined treated as unset
*/
public setAudioInput(deviceId: string): void {
public async setAudioInput(deviceId: string): Promise<void> {
this.audioInput = deviceId;

await Promise.all(Array.from(this.client.callEventHandler.calls.values()).map((call) => {
return call.updateLocalUsermediaStream();
}));
}

/**
* Set a video input device to use for MatrixCalls
* @param {string} deviceId the identifier for the device
* undefined treated as unset
*/
public setVideoInput(deviceId: string): void {
public async setVideoInput(deviceId: string): Promise<void> {
this.videoInput = deviceId;

await Promise.all(Array.from(this.client.callEventHandler.calls.values()).map((call) => {
return call.updateLocalUsermediaStream();
}));
}

/**
 * @returns {Promise<boolean>} true if at least one audio capture
 * (microphone) device is available.
 */
public async hasAudioDevice(): Promise<boolean> {
    const devices = await navigator.mediaDevices.enumerateDevices();
    // some() short-circuits and avoids building an intermediate array,
    // unlike filter(...).length > 0.
    return devices.some(device => device.kind === "audioinput");
}

/**
 * @returns {Promise<boolean>} true if at least one video capture
 * (camera) device is available.
 */
public async hasVideoDevice(): Promise<boolean> {
    const devices = await navigator.mediaDevices.enumerateDevices();
    // some() short-circuits and avoids building an intermediate array,
    // unlike filter(...).length > 0.
    return devices.some(device => device.kind === "videoinput");
}

/**
* @returns {MediaStream} based on passed parameters
*/
public async getUserMediaStream(audio: boolean, video: boolean): Promise<MediaStream> {
public async getUserMediaStream(audio: boolean, video: boolean, forceNewStream = false): Promise<MediaStream> {
const shouldRequestAudio = audio && await this.hasAudioDevice();
const shouldRequestVideo = video && await this.hasVideoDevice();

let stream: MediaStream;

// Find a stream with matching tracks
const matchingStream = this.userMediaStreams.find((stream) => {
if (audio !== (stream.getAudioTracks().length > 0)) return false;
if (video !== (stream.getVideoTracks().length > 0)) return false;
if (shouldRequestAudio !== (stream.getAudioTracks().length > 0)) return false;
if (shouldRequestVideo !== (stream.getVideoTracks().length > 0)) return false;
return true;
});

if (matchingStream) {
if (matchingStream && !forceNewStream) {
logger.log("Cloning user media stream", matchingStream.id);
stream = matchingStream.clone();
} else {
const constraints = this.getUserMediaContraints(audio, video);
const constraints = this.getUserMediaContraints(shouldRequestAudio, shouldRequestVideo);
logger.log("Getting user media with constraints", constraints);
stream = await navigator.mediaDevices.getUserMedia(constraints);
}
Expand Down