mirror of https://github.com/OpenVidu/openvidu.git
openvidu-components: Added replace track e2e tests
parent 55db788f50
commit c52f632d7c
@@ -71,7 +71,7 @@ jobs:
           name: openvidu-browser
           path: openvidu-components-angular
       - name: Run Browserless Chrome
-        run: docker run -d -p 3000:3000 --network host browserless/chrome:1.53-chrome-stable
+        run: docker run -d -p 3000:3000 --network host browserless/chrome:1.57-chrome-stable
       - name: Run openvidu-server-kms
         run: |
           docker run -p 4443:4443 --rm -d \
@@ -106,7 +106,7 @@ jobs:
           name: openvidu-browser
           path: openvidu-components-angular
       - name: Run Browserless Chrome
-        run: docker run -d -p 3000:3000 --network host browserless/chrome:1.53-chrome-stable
+        run: docker run -d -p 3000:3000 --network host browserless/chrome:1.57-chrome-stable
       - name: Run openvidu-server-kms
         run: |
           docker run -p 4443:4443 --rm -d \
@@ -140,7 +140,7 @@ jobs:
           name: openvidu-browser
           path: openvidu-components-angular
       - name: Run Browserless Chrome
-        run: docker run -d -p 3000:3000 --network host browserless/chrome:1.53-chrome-stable
+        run: docker run -d -p 3000:3000 --network host browserless/chrome:1.57-chrome-stable
       - name: Install openvidu-browser and dependencies
         run: |
           cd openvidu-components-angular
@@ -2,7 +2,7 @@ import { expect } from 'chai';
 import { By, until, WebDriver, WebElement } from 'selenium-webdriver';

 export class OpenViduComponentsPO {
-    private TIMEOUT = 30 * 1000;
+    private TIMEOUT = 10 * 1000;
     private POLL_TIMEOUT = 1 * 1000;

     constructor(private browser: WebDriver) {}

@@ -16,7 +16,7 @@ export class OpenViduComponentsPO {
         );
     }

-    async getNumberOfElements(selector: string){
+    async getNumberOfElements(selector: string): Promise<number> {
         return (await this.browser.findElements(By.css(selector))).length;
     }
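A short usage sketch of the typed helper, assuming a running WebDriver session and the selectors exercised by the specs below:

// Hypothetical sketch: the page object counts matching nodes so specs can
// assert on element counts without calling browser.findElements() directly.
const utils = new OpenViduComponentsPO(browser);
expect(await utils.getNumberOfElements('video')).equals(2);
expect(await utils.getNumberOfElements('#nickname-container')).equals(0);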
@@ -1,3 +1,5 @@
+import monkeyPatchMediaDevices from './utils/media-devices.js';
+
 var MINIMAL;
 var LANG;
 var CAPTIONS_LANG;
@@ -29,6 +31,7 @@ var CAPTIONS_BUTTON;

 var SINGLE_TOKEN;
 var SESSION_NAME;
+var FAKE_DEVICES;

 var PARTICIPANT_NAME;
@@ -43,6 +46,8 @@ $(document).ready(() => {

     SINGLE_TOKEN = url.searchParams.get('singleToken') === null ? false : url.searchParams.get('singleToken') === 'true';

+    FAKE_DEVICES = url.searchParams.get('fakeDevices') === null ? false : url.searchParams.get('fakeDevices') === 'true';
+
     // Directives
     MINIMAL = url.searchParams.get('minimal') === null ? false : url.searchParams.get('minimal') === 'true';
     LANG = url.searchParams.get('lang') || 'en';
@@ -197,6 +202,11 @@ function appendElement(id) {
 async function joinSession(sessionName, participantName) {
     var webComponent = document.querySelector('openvidu-webcomponent');
     var tokens;

+    if (FAKE_DEVICES) {
+        monkeyPatchMediaDevices();
+    }
+
     if (SINGLE_TOKEN) {
         tokens = await getToken(sessionName);
     } else {
@@ -8,7 +8,10 @@
       crossorigin="anonymous"
     ></script>

-    <script src="app.js"></script>
+    <script type="module" src="utils/filter-stream.js"></script>
+    <script type="module" src="utils/shader-renderer.js"></script>
+    <script type="module" src="utils/media-devices.js"></script>
+    <script type="module" src="app.js"></script>
     <script src="openvidu-webcomponent-dev.js"></script>
     <link rel="stylesheet" href="openvidu-webcomponent-dev.css" />
@@ -0,0 +1,30 @@
+class FilterStream {
+    constructor(stream, label) {
+        const videoTrack = stream.getVideoTracks()[0];
+        const { width, height } = videoTrack.getSettings();
+        const canvas = document.createElement('canvas');
+        const ctx = canvas.getContext('2d');
+        const video = document.createElement('video');
+        video.srcObject = new MediaStream([videoTrack]);
+        video.play();
+
+        video.addEventListener('play', () => {
+            const loop = () => {
+                if (!video.paused && !video.ended) {
+                    ctx.filter = 'grayscale(100%)';
+                    ctx.drawImage(video, 0, 0, video.videoWidth, video.videoHeight, 0, 0, video.videoWidth, video.videoHeight);
+                    setTimeout(loop, 33);
+                }
+            };
+            loop();
+        });
+        this.outputStream = canvas.captureStream();
+
+        Object.defineProperty(this.outputStream.getVideoTracks()[0], 'label', {
+            writable: true,
+            value: label
+        });
+    }
+}
+
+export { FilterStream };
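A rough usage sketch of the new FilterStream helper (the grayscale filter makes the substituted track easy to spot on screen); only the class and the label come from the code above, the stream request is illustrative:

// Hypothetical example: wrap a real camera stream and hand out the
// relabelled, grayscaled clone instead of the original track.
import { FilterStream } from './filter-stream.js';

const raw = await navigator.mediaDevices.getUserMedia({ video: true });
const filtered = new FilterStream(raw, 'custom_fake_video_1');
console.log(filtered.outputStream.getVideoTracks()[0].label); // 'custom_fake_video_1'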
@@ -0,0 +1,62 @@
+// Ideally we'd use an editor or import shaders directly from the API.
+import { FilterStream } from './filter-stream.js';
+
+export default function monkeyPatchMediaDevices() {
+    const enumerateDevicesFn = MediaDevices.prototype.enumerateDevices;
+    const getUserMediaFn = MediaDevices.prototype.getUserMedia;
+    const getDisplayMediaFn = MediaDevices.prototype.getDisplayMedia;
+
+    const fakeDevice = {
+        deviceId: 'virtual',
+        groupID: '',
+        kind: 'videoinput',
+        label: 'custom_fake_video_1'
+    };
+
+    MediaDevices.prototype.enumerateDevices = async function () {
+        const res = await enumerateDevicesFn.call(navigator.mediaDevices);
+        res.push(fakeDevice);
+        return res;
+    };
+
+    MediaDevices.prototype.getUserMedia = async function () {
+        const args = arguments[0];
+        const { deviceId, advanced, width, height } = args.video;
+        if (deviceId === 'virtual' || deviceId?.exact === 'virtual') {
+            const constraints = {
+                video: {
+                    facingMode: args.facingMode,
+                    advanced,
+                    width,
+                    height
+                },
+                audio: false
+            };
+            const res = await getUserMediaFn.call(navigator.mediaDevices, constraints);
+
+            if (res) {
+                const filter = new FilterStream(res, fakeDevice.label);
+                return filter.outputStream;
+            }
+
+            return res;
+        }
+
+        return getUserMediaFn.call(navigator.mediaDevices, ...arguments);
+    };
+
+    MediaDevices.prototype.getDisplayMedia = async function () {
+        const { video, audio } = arguments[0];
+
+        const screenVideoElement = document.getElementsByClassName("OT_video-element screen-type")[0];
+        const currentTrackLabel = screenVideoElement?.srcObject?.getVideoTracks()[0]?.label;
+        const res = await getDisplayMediaFn.call(navigator.mediaDevices, { video, audio });
+
+        if (res && currentTrackLabel && currentTrackLabel !== 'custom_fake_screen') {
+            const filter = new FilterStream(res, 'custom_fake_screen');
+            return filter.outputStream;
+        }
+
+        return res;
+    };
+}
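Once monkeyPatchMediaDevices() has run, the page reports one extra video input and serves a relabelled grayscale stream for it. A sketch of what the e2e specs rely on; the ids and labels come from the patch above, the rest is illustrative:

import monkeyPatchMediaDevices from './utils/media-devices.js';

monkeyPatchMediaDevices();

// The emulated camera now shows up alongside Chrome's built-in fake devices.
const devices = await navigator.mediaDevices.enumerateDevices();
console.log(devices.some((d) => d.deviceId === 'virtual')); // true

// Selecting it yields a stream whose video track is labelled
// 'custom_fake_video_1', which is what the replace-track specs assert on.
const stream = await navigator.mediaDevices.getUserMedia({ video: { deviceId: 'virtual' } });
console.log(stream.getVideoTracks()[0].label); // 'custom_fake_video_1'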
@@ -1,5 +1,5 @@
 import { expect } from 'chai';
-import { Builder, By, Key, WebDriver } from 'selenium-webdriver';
+import { Builder, Key, WebDriver } from 'selenium-webdriver';
 import { OPENVIDU_SECRET, OPENVIDU_SERVER_URL } from './config';
 import { getBrowserOptionsWithoutDevices, WebComponentConfig } from './selenium.conf';
 import { OpenViduComponentsPO } from './utils.po.test';
@@ -88,8 +88,7 @@ describe('Testing API Directives', () => {
         expect(await utils.isPresent('#session-name')).to.be.false;

         // Checking if nickname is not displayed
-        await browser.findElements(By.id('nickname-container'));
-        expect(await utils.isPresent('#nickname-container')).to.be.false;
+        expect(await utils.getNumberOfElements('#nickname-container')).equals(0);

         // Checking if audio detection is not displayed
         expect(await utils.isPresent('#audio-wave-container')).to.be.false;
@@ -258,9 +257,8 @@ describe('Testing API Directives', () => {
         // Checking if fullscreen button is not present
         await utils.waitForElement('.mat-menu-content');
         expect(await utils.isPresent('.mat-menu-content')).to.be.true;
+        expect(await utils.getNumberOfElements('#fullscreen-btn')).equals(0);

-        await browser.findElements(By.id('fullscreen-btn'));
-        expect(await utils.isPresent('#fullscreen-btn')).to.be.false;
     });

     it('should HIDE the CAPTIONS button', async () => {
@@ -364,8 +362,7 @@ describe('Testing API Directives', () => {
         await utils.checkToolbarIsPresent();

         // Checking if leave button is not present
-        await browser.findElements(By.id('leave-btn'));
-        expect(await utils.isPresent('#leave-btn')).to.be.false;
+        expect(await utils.getNumberOfElements('#leave-btn')).equals(0);
     });

     it('should HIDE the ACTIVITIES PANEL button', async () => {
@@ -1042,6 +1039,133 @@ describe('Testing videoconference EVENTS', () => {
     });
 });

+describe('Testing replace track with emulated devices', () => {
+    let browser: WebDriver;
+    let utils: OpenViduComponentsPO;
+    async function createChromeBrowser(): Promise<WebDriver> {
+        return await new Builder()
+            .forBrowser(WebComponentConfig.browserName)
+            .withCapabilities(WebComponentConfig.browserCapabilities)
+            .setChromeOptions(WebComponentConfig.browserOptions)
+            .usingServer(WebComponentConfig.seleniumAddress)
+            .build();
+    }
+
+    beforeEach(async () => {
+        browser = await createChromeBrowser();
+        utils = new OpenViduComponentsPO(browser);
+    });
+
+    afterEach(async () => {
+        // console.log('data:image/png;base64,' + await browser.takeScreenshot());
+        await browser.quit();
+    });
+
+    it('should replace the video track in prejoin page', async () => {
+        const script = 'return document.getElementsByTagName("video")[0].srcObject.getVideoTracks()[0].label;';
+
+        await browser.get(`${url}&fakeDevices=true`);
+
+        let videoDevices = await utils.waitForElement('#video-devices-form');
+
+        await videoDevices.click();
+
+        let element = await utils.waitForElement('#option-custom_fake_video_1');
+
+        await element.click();
+
+        let videoLabel;
+
+        await browser.sleep(1000);
+        videoLabel = await browser.executeScript<string>(script);
+        expect(videoLabel).to.be.equal('custom_fake_video_1');
+
+        await videoDevices.click();
+
+        element = await utils.waitForElement('#option-fake_device_0');
+        await element.click();
+
+        await browser.sleep(1000);
+        videoLabel = await browser.executeScript<string>(script);
+        expect(videoLabel).to.be.equal('fake_device_0');
+    });
+
+    it('should replace the video track in videoconference page', async () => {
+        const script = 'return document.getElementsByTagName("video")[0].srcObject.getVideoTracks()[0].label;';
+
+        await browser.get(`${url}&prejoin=false&fakeDevices=true`);
+
+        await utils.checkSessionIsPresent();
+
+        // Checking if toolbar is present
+        await utils.checkToolbarIsPresent();
+
+        // Open more options menu
+        await utils.clickOn('#more-options-btn');
+
+        // Checking if button panel is present
+        await utils.waitForElement('.mat-menu-content');
+        expect(await utils.isPresent('.mat-menu-content')).to.be.true;
+
+        await utils.clickOn('#toolbar-settings-btn');
+
+        await utils.waitForElement('.settings-container');
+        expect(await utils.isPresent('.settings-container')).to.be.true;
+
+        await utils.clickOn('#video-opt');
+        expect(await utils.isPresent('ov-video-devices-select')).to.be.true;
+
+        let videoDevices = await utils.waitForElement('#video-devices-form');
+
+        await videoDevices.click();
+
+        let element = await utils.waitForElement('#option-custom_fake_video_1');
+
+        await element.click();
+
+        let videoLabel;
+        await browser.sleep(1000);
+        videoLabel = await browser.executeScript<string>(script);
+        expect(videoLabel).to.be.equal('custom_fake_video_1');
+
+        await videoDevices.click();
+
+        element = await utils.waitForElement('#option-fake_device_0');
+        await element.click();
+
+        await browser.sleep(1000);
+        videoLabel = await browser.executeScript<string>(script);
+        expect(videoLabel).to.be.equal('fake_device_0');
+    });
+
+    it('should replace the screen track', async () => {
+        const script = 'return document.getElementsByClassName("OT_video-element screen-type")[0].srcObject.getVideoTracks()[0].label;';
+
+        await browser.get(`${url}&prejoin=false&fakeDevices=true`);
+
+        await utils.checkLayoutPresent();
+        await utils.checkToolbarIsPresent();
+
+        await utils.clickOn('#screenshare-btn');
+
+        await browser.sleep(500);
+
+        let screenLabel = await browser.executeScript<string>(script);
+        expect(screenLabel).not.equal('custom_fake_screen');
+
+        await utils.clickOn('#video-settings-btn-SCREEN');
+        await browser.sleep(500);
+
+        await utils.waitForElement('.video-settings-menu');
+        const replaceBtn = await utils.waitForElement('#replace-screen-button');
+        await replaceBtn.sendKeys(Key.ENTER);
+
+        await browser.sleep(1000);
+        screenLabel = await browser.executeScript<string>(script);
+        expect(screenLabel).to.be.equal('custom_fake_screen');
+    });
+});
+
 describe('Testing stream video menu features', () => {
     let browser: WebDriver;
     let utils: OpenViduComponentsPO;
@@ -1102,7 +1226,6 @@ describe('Testing screenshare features', () => {


     it('should toggle screensharing twice', async () => {
-        let element;
         await browser.get(`${url}&prejoin=false`);
         await utils.checkLayoutPresent();

@@ -1112,30 +1235,27 @@ describe('Testing screenshare features', () => {
         await screenshareButton.click();

         await utils.waitForElement('.OV_big');
-        element = await browser.findElements(By.css('video'));
-        expect(element.length).equals(2);
+        expect(await utils.getNumberOfElements('video')).equals(2);

         // Clicking to screensharing button
         await screenshareButton.click();
+        expect(await utils.getNumberOfElements('video')).equals(1);

-        element = await browser.findElements(By.css('video'));
-        expect(element.length).equals(1);

         // toggle screenshare again
         await screenshareButton.click();

         await utils.waitForElement('.OV_big');
-        element = await browser.findElements(By.css('video'));
-        expect(element.length).equals(2);
+        expect(await utils.getNumberOfElements('video')).equals(2);

         await screenshareButton.click();

-        element = await browser.findElements(By.css('video'));
-        expect(element.length).equals(1);
+        expect(await utils.getNumberOfElements('video')).equals(1);

     });

     it('should show only screen if toggle screensharing with video muted', async () => {
-        let element;
         await browser.get(`${url}&prejoin=false`);

         await utils.checkLayoutPresent();
@@ -1150,17 +1270,19 @@ describe('Testing screenshare features', () => {

         await browser.sleep(1000);
         await utils.waitForElement('.OV_big');
-        element = await browser.findElements(By.css('video'));
-        expect(element.length).equals(1);

+        expect(await utils.getNumberOfElements('video')).equals(1);

         await screenshareButton.click();
         await browser.sleep(1000);
-        element = await browser.findElements(By.css('video'));
-        expect(element.length).equals(1);

+        expect(await utils.getNumberOfElements('video')).equals(1);

     });

     it('should screensharing with audio muted', async () => {
-        let element, isAudioEnabled;
+        let isAudioEnabled;
         const getAudioScript = (className: string) => {
             return `return document.getElementsByClassName('${className}')[0].srcObject.getAudioTracks()[0].enabled;`;
         };
@@ -1177,25 +1299,22 @@ describe('Testing screenshare features', () => {
         await screenshareButton.click();

         await utils.waitForElement('.screen-type');
-        element = await browser.findElements(By.css('video'));
-        expect(element.length).equals(2);
+        expect(await utils.getNumberOfElements('video')).equals(2);

         isAudioEnabled = await browser.executeScript(getAudioScript('screen-type'));
         expect(isAudioEnabled).to.be.false;

         await utils.waitForElement('#statusMic');
-        element = await browser.findElements(By.id('statusMic'));
-        expect(element.length).equals(2);
+        expect(await utils.getNumberOfElements('#statusMic')).equals(2);

         // Clicking to screensharing button
         await screenshareButton.click();
+        expect(await utils.getNumberOfElements('video')).equals(1);

-        element = await browser.findElements(By.css('video'));
-        expect(element.length).equals(1);
     });

     it('should show and hide CAMERA stream when muting video with screensharing', async () => {
-        let element;
         await browser.get(`${url}&prejoin=false`);

         await utils.checkLayoutPresent();
@@ -1206,14 +1325,12 @@ describe('Testing screenshare features', () => {
         await screenshareButton.click();

         await utils.waitForElement('.OV_big');
-        element = await browser.findElements(By.css('video'));
-        expect(element.length).equals(2);
+        expect(await utils.getNumberOfElements('video')).equals(2);

         const muteVideoButton = await utils.waitForElement('#camera-btn');
         await muteVideoButton.click();

-        element = await browser.findElements(By.css('video'));
-        expect(element.length).equals(1);
+        expect(await utils.getNumberOfElements('video')).equals(1);
     });

     it('should screenshare has audio active when camera is muted', async () => {
@@ -1229,19 +1346,15 @@ describe('Testing screenshare features', () => {
         expect(await utils.isPresent('#screenshare-btn')).to.be.true;
         await screenshareButton.click();

-        element = await utils.waitForElement('.OV_big');
-        element = await browser.findElements(By.css('video'));
-        expect(element.length).equals(2);
-
-        element = await browser.findElements(By.id('statusMic'));
-        expect(element.length).equals(1);
+        await utils.waitForElement('.OV_big');
+        expect(await utils.getNumberOfElements('video')).equals(2);
+        expect(await utils.getNumberOfElements('#statusMic')).equals(1);

         // Muting camera video
         const muteVideoButton = await utils.waitForElement('#camera-btn');
         await muteVideoButton.click();

-        element = await browser.findElements(By.css('video'));
-        expect(element.length).equals(1);
+        expect(await utils.getNumberOfElements('video')).equals(1);

         await browser.sleep(500);
         expect(await utils.isPresent('#statusMic')).to.be.false;
@@ -1253,12 +1366,9 @@ describe('Testing screenshare features', () => {
         // Unmuting camera
         await muteVideoButton.click();

-        element = await utils.waitForElement('.camera-type');
-        element = await browser.findElements(By.css('video'));
-        expect(element.length).equals(2);
-
-        element = await browser.findElements(By.id('statusMic'));
-        expect(element.length).equals(1);
+        await utils.waitForElement('.camera-type');
+        expect(await utils.getNumberOfElements('video')).equals(2);
+        expect(await utils.getNumberOfElements('#statusMic')).equals(1);
     });

     it('should camera come back with audio muted when screensharing', async () => {
@@ -1277,19 +1387,16 @@ describe('Testing screenshare features', () => {
         await screenshareButton.click();

         await utils.waitForElement('.screen-type');
-        element = await browser.findElements(By.css('video'));
-        expect(element.length).equals(2);
+        expect(await utils.getNumberOfElements('video')).equals(2);
+        expect(await utils.getNumberOfElements('#statusMic')).equals(1);

-        element = await browser.findElements(By.id('statusMic'));
-        expect(element.length).equals(1);

         // Mute camera
         const muteVideoButton = await utils.waitForElement('#camera-btn');
         await muteVideoButton.click();

-        element = await browser.findElements(By.css('video'));
-        expect(element.length).equals(1);
+        expect(await utils.getNumberOfElements('video')).equals(1);
         expect(await utils.isPresent('#statusMic')).to.be.false;

         // Checking if audio is muted after join the room
@@ -1301,8 +1408,8 @@ describe('Testing screenshare features', () => {
         await muteAudioButton.click();

         await utils.waitForElement('#statusMic');
-        element = await browser.findElements(By.id('statusMic'));
-        expect(element.length).equals(1);
+        expect(await utils.getNumberOfElements('#statusMic')).equals(1);
+
         isAudioEnabled = await browser.executeScript(getAudioScript('screen-type'));
         expect(isAudioEnabled).to.be.false;
@@ -1311,11 +1418,9 @@ describe('Testing screenshare features', () => {
         await muteVideoButton.click();

         await utils.waitForElement('.camera-type');
-        element = await browser.findElements(By.css('video'));
-        expect(element.length).equals(2);
+        expect(await utils.getNumberOfElements('video')).equals(2);
+        expect(await utils.getNumberOfElements('#statusMic')).equals(2);

-        element = await browser.findElements(By.id('statusMic'));
-        expect(element.length).equals(2);

         isAudioEnabled = await browser.executeScript(getAudioScript('camera-type'));
         expect(isAudioEnabled).to.be.false;
@@ -1513,9 +1618,7 @@ describe('Testing panels', () => {

         // Close chat panel
         await chatButton.click();
-        await browser.findElements(By.className('input-container'));
-        expect(await utils.isPresent('.input-container')).to.be.false;
-
+        expect(await utils.getNumberOfElements('.input-container')).equals(0);
         expect(await utils.isPresent('messages-container')).to.be.false;
     });
@@ -23,16 +23,17 @@
             videocam_off
         </mat-icon>
     </button>
-    <mat-form-field>
+    <mat-form-field id="video-devices-form">
         <mat-label *ngIf="hasVideoDevices">{{ 'PREJOIN.VIDEO_DEVICE' | translate }}</mat-label>
         <mat-label *ngIf="!hasVideoDevices">{{ 'PREJOIN.NO_VIDEO_DEVICE' | translate }}</mat-label>
         <mat-select
             [disabled]="isVideoMuted || !hasVideoDevices"
-            [value]="cameraSelected?.device"
+            [compareWith]="compareObjectDevices"
+            [value]="cameraSelected"
             (click)="onDeviceSelectorClicked.emit()"
             (selectionChange)="onCameraSelected($event)"
         >
-            <mat-option *ngFor="let camera of cameras" [value]="camera.device">
+            <mat-option *ngFor="let camera of cameras" [value]="camera" id="option-{{camera.label}}">
                 {{ camera.label }}
             </mat-option>
         </mat-select>
@@ -21,8 +21,8 @@ import { VirtualBackgroundService } from '../../../services/virtual-background/v
     styleUrls: ['./video-devices.component.css']
 })
 export class VideoDevicesComponent implements OnInit, OnDestroy {
-    @Output() onDeviceSelectorClicked = new EventEmitter<void>();
-    @Output() onVideoMutedClicked = new EventEmitter<boolean>();
+    @Output() onDeviceSelectorClicked = new EventEmitter<void>();
+    @Output() onVideoMutedClicked = new EventEmitter<boolean>();

     videoMuteChanging: boolean;
     isVideoMuted: boolean;
@@ -47,9 +47,8 @@ export class VideoDevicesComponent implements OnInit, OnDestroy {
             await this.deviceSrv.refreshDevices();
         }

-
         this.hasVideoDevices = this.deviceSrv.hasVideoDeviceAvailable();
-        if(this.hasVideoDevices){
+        if (this.hasVideoDevices) {
             this.cameras = this.deviceSrv.getCameras();
             this.cameraSelected = this.deviceSrv.getCameraSelected();
         }
@@ -76,18 +75,19 @@ export class VideoDevicesComponent implements OnInit, OnDestroy {
     }

     async onCameraSelected(event: any) {
-        const videoSource = event?.value;
+        const device: CustomDevice = event?.value;

         // Is New deviceId different from the old one?
-        if (this.deviceSrv.needUpdateVideoTrack(videoSource)) {
-            const mirror = this.deviceSrv.cameraNeedsMirror(videoSource);
+        if (this.deviceSrv.needUpdateVideoTrack(device)) {
+            const mirror = this.deviceSrv.cameraNeedsMirror(device.device);
             // Reapply Virtual Background to new Publisher if necessary
             const backgroundSelected = this.backgroundService.backgroundSelected.getValue();
-            const isBackgroundApplied = this.backgroundService.isBackgroundApplied()
+            const isBackgroundApplied = this.backgroundService.isBackgroundApplied();

             if (isBackgroundApplied) {
                 await this.backgroundService.removeBackground();
             }
-            const pp: PublisherProperties = { videoSource, audioSource: false, mirror };
+            const pp: PublisherProperties = { videoSource: device.device, audioSource: false, mirror };
             await this.openviduService.replaceTrack(VideoType.CAMERA, pp);

             if (isBackgroundApplied) {
@@ -97,11 +97,19 @@ export class VideoDevicesComponent implements OnInit, OnDestroy {
             }
         }

-        this.deviceSrv.setCameraSelected(videoSource);
+        this.deviceSrv.setCameraSelected(device.device);
         this.cameraSelected = this.deviceSrv.getCameraSelected();
         }
     }

+    /**
+     * @internal
+     * Compare two devices to check if they are the same. Used by the mat-select
+     */
+    compareObjectDevices(o1: CustomDevice, o2: CustomDevice): boolean {
+        return o1.label === o2.label;
+    }
+
     protected subscribeToParticipantMediaProperties() {
         this.localParticipantSubscription = this.participantService.localParticipantObs.subscribe((p: ParticipantAbstractModel) => {
             if (p) {
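For context, a sketch of how Angular Material's compareWith hook pairs with the template change above. The CustomDevice fields follow this commit; the component instance and literal values are purely illustrative:

// mat-select calls compareWith(optionValue, selectedValue) for every option,
// so two CustomDevice objects describing the same camera are treated as the
// same selection even when they are different object instances.
const selected = { label: 'custom_fake_video_1', device: 'virtual' };
const option = { label: 'custom_fake_video_1', device: 'virtual' };
console.log(component.compareObjectDevices(option, selected)); // true (labels match)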
@@ -1,7 +1,7 @@
 <div
     *ngIf="this._stream"
     class="OV_stream"
-    [ngClass]="{'no-size': !showVideo}"
+    [ngClass]="{ 'no-size': !showVideo }"
     [id]="'container-' + this._stream.streamManager?.stream?.streamId"
     #streamContainer
 >
@@ -50,11 +50,18 @@
     </div>

     <div *ngIf="!isMinimal && showSettingsButton" id="settings-container" class="videoButtons">
-        <button mat-icon-button (click)="toggleVideoMenu($event)" matTooltip="{{ 'STREAM.SETTINGS' | translate }}" matTooltipPosition="above" aria-label="Video settings menu" id="stream-menu-btn">
+        <button
+            mat-icon-button
+            (click)="toggleVideoMenu($event)"
+            matTooltip="{{ 'STREAM.SETTINGS' | translate }}"
+            matTooltipPosition="above"
+            aria-label="Video settings menu"
+            id="video-settings-btn-{{this._stream.streamManager?.stream?.typeOfVideo}}"
+        >
             <mat-icon>more_vert</mat-icon>
         </button>
         <span [matMenuTriggerFor]="menu"></span>
-        <mat-menu #menu="matMenu" yPosition="above" xPosition="before">
+        <mat-menu #menu="matMenu" yPosition="above" xPosition="before" class="video-settings-menu">
             <button mat-menu-item id="videoZoomButton" (click)="toggleVideoEnlarged()">
                 <mat-icon>{{ this.videoSizeIcon }}</mat-icon>
                 <span *ngIf="videoSizeIcon === videoSizeIconEnum.NORMAL">{{ 'STREAM.ZOOM_OUT' | translate }}</span>
@@ -70,7 +77,7 @@
             <button
                 mat-menu-item
                 (click)="replaceScreenTrack()"
-                id="changeScreenButton"
+                id="replace-screen-button"
                 *ngIf="!this._stream.streamManager?.remote && this._stream.streamManager?.stream?.typeOfVideo === videoTypeEnum.SCREEN"
             >
                 <mat-icon>picture_in_picture</mat-icon>
@@ -177,8 +177,8 @@ export class DeviceService {
         return this.microphoneSelected;
     }

-    setCameraSelected(deviceField: any) {
-        this.cameraSelected = this.getCameraByDeviceField(deviceField);
+    setCameraSelected(deviceId: any) {
+        this.cameraSelected = this.getCameraByDeviceField(deviceId);
         this.saveCameraToStorage(this.cameraSelected);
     }

@@ -187,8 +187,8 @@ export class DeviceService {
         this.saveMicrophoneToStorage(this.microphoneSelected);
     }

-    needUpdateVideoTrack(newVideoSource: string): boolean {
-        return this.cameraSelected?.device !== newVideoSource;
+    needUpdateVideoTrack(newDevice: CustomDevice): boolean {
+        return this.cameraSelected?.device !== newDevice.device || this.cameraSelected?.label !== newDevice.label;
     }

     needUpdateAudioTrack(newAudioSource: string): boolean {
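A brief sketch of why the extra label check matters, assuming the emulated camera reuses the 'virtual' deviceId so only the track label distinguishes selections (the service instance and the second label are hypothetical):

// With only deviceId compared, switching between two entries that share an id
// would look like "no change" and replaceTrack() would be skipped.
const current = { device: 'virtual', label: 'custom_fake_video_1' };
const next = { device: 'virtual', label: 'custom_fake_video_2' };
// deviceSrv.cameraSelected === current (hypothetical state)
console.log(deviceSrv.needUpdateVideoTrack(next)); // true, because the labels differ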