openvidu-browser: Added common format config file

pull/750/head
csantosm 2022-08-17 18:04:05 +02:00
parent 128dd3cfed
commit 2ce54f577b
66 changed files with 3711 additions and 3328 deletions

@ -0,0 +1,10 @@
{
"singleQuote": true,
"printWidth": 140,
"trailingComma": "none",
"semi": true,
"bracketSpacing": true,
"useTabs": false,
"jsxSingleQuote": true,
"tabWidth": 4
}
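
The settings above are what every reformatted file in this commit follows: single quotes, semicolons, four-space indentation, no trailing commas, and wrapping only past 140 columns. As a rough, hypothetical illustration (this snippet is not part of the commit and the names are made up), code written under this config looks like:

```typescript
// Illustrative only: shows the style the new .prettierrc enforces
// (singleQuote, semi, tabWidth 4, trailingComma "none", printWidth 140).
interface RecordingOptions {
    mimeType: string;
    audioBitsPerSecond?: number;
}

function describeRecording(streamId: string, options: RecordingOptions): string {
    const summary = {
        streamId,
        mimeType: options.mimeType,
        audio: options.audioBitsPerSecond ?? 'default'
    };
    return 'Recording ' + streamId + ' with ' + JSON.stringify(summary);
}

console.log(describeRecording('stream_01', { mimeType: 'video/webm' }));
```

Assuming Prettier is installed as a dev dependency, a sweep like `npx prettier --write "src/**/*.ts"` over the sources would produce the kind of formatting-only churn seen in the diffs below.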

@ -6,4 +6,4 @@ if (typeof globalThis !== 'undefined') {
}
// Disable jsnlog when library is loaded
-JL.setOptions({ enabled: false })
+JL.setOptions({ enabled: false });

@ -29,13 +29,11 @@ import { ExceptionEvent, ExceptionEventName } from '../OpenViduInternal/Events/E
 */
const logger: OpenViduLogger = OpenViduLogger.getInstance();
/**
 * Represents each one of the user's connection to the session (the local one and other user's connections).
 * Therefore each [[Session]] and [[Stream]] object has an attribute of type Connection
 */
export class Connection {
    /**
     * Unique identifier of the connection
     */
@ -125,38 +123,46 @@ export class Connection {
        logger.info(msg);
    }
    /* Hidden methods */
    /**
     * @hidden
     */
    sendIceCandidate(candidate: RTCIceCandidate): void {
-       logger.debug((!!this.stream!.outboundStreamOpts ? 'Local' : 'Remote') + 'candidate for' +
-           this.connectionId, candidate);
-       this.session.openvidu.sendRequest('onIceCandidate', {
-           endpointName: this.connectionId,
-           candidate: candidate.candidate,
-           sdpMid: candidate.sdpMid,
-           sdpMLineIndex: candidate.sdpMLineIndex
-       }, (error, response) => {
-           if (error) {
-               logger.error('Error sending ICE candidate: ' + JSON.stringify(error));
-               this.session.emitEvent('exception', [new ExceptionEvent(this.session, ExceptionEventName.ICE_CANDIDATE_ERROR, this.session, "There was an unexpected error on the server-side processing an ICE candidate generated and sent by the client-side", error)]);
-           }
-       });
+       logger.debug((!!this.stream!.outboundStreamOpts ? 'Local' : 'Remote') + 'candidate for' + this.connectionId, candidate);
+       this.session.openvidu.sendRequest(
+           'onIceCandidate',
+           {
+               endpointName: this.connectionId,
+               candidate: candidate.candidate,
+               sdpMid: candidate.sdpMid,
+               sdpMLineIndex: candidate.sdpMLineIndex
+           },
+           (error, response) => {
+               if (error) {
+                   logger.error('Error sending ICE candidate: ' + JSON.stringify(error));
+                   this.session.emitEvent('exception', [
+                       new ExceptionEvent(
+                           this.session,
+                           ExceptionEventName.ICE_CANDIDATE_ERROR,
+                           this.session,
+                           'There was an unexpected error on the server-side processing an ICE candidate generated and sent by the client-side',
+                           error
+                       )
+                   ]);
+               }
+           }
+       );
    }
    /**
     * @hidden
     */
    initRemoteStreams(options: StreamOptionsServer[]): void {
        // This is ready for supporting multiple streams per Connection object. Right now the loop will always run just once
        // this.stream should also be replaced by a collection of streams to support multiple streams per Connection
-       options.forEach(opts => {
+       options.forEach((opts) => {
            const streamOptions: InboundStreamOptions = {
                id: opts.id,
                createdAt: opts.createdAt,
@ -175,7 +181,10 @@ export class Connection {
            this.addStream(stream);
        });
-       logger.info("Remote 'Connection' with 'connectionId' [" + this.connectionId + '] is now configured for receiving Streams with options: ', this.stream!.inboundStreamOpts);
+       logger.info(
+           "Remote 'Connection' with 'connectionId' [" + this.connectionId + '] is now configured for receiving Streams with options: ',
+           this.stream!.inboundStreamOpts
+       );
    }
    /**
@ -202,5 +211,4 @@ export class Connection {
        }
        this.disposed = true;
    }
}

@ -27,7 +27,6 @@ import { OpenViduLogger } from '../OpenViduInternal/Logger/OpenViduLogger';
const logger: OpenViduLogger = OpenViduLogger.getInstance();
export abstract class EventDispatcher {
    /**
     * @hidden
     */
@ -42,27 +41,27 @@ export abstract class EventDispatcher {
     *
     * @returns The EventDispatcher object
     */
-   abstract on<K extends keyof (EventMap)>(type: K, handler: (event: (EventMap)[K]) => void): this;
+   abstract on<K extends keyof EventMap>(type: K, handler: (event: EventMap[K]) => void): this;
    /**
     * Adds function `handler` to handle event `type` just once. The handler will be automatically removed after first execution
     *
     * @returns The object that dispatched the event
     */
-   abstract once<K extends keyof (EventMap)>(type: K, handler: (event: (EventMap)[K]) => void): this;
+   abstract once<K extends keyof EventMap>(type: K, handler: (event: EventMap[K]) => void): this;
    /**
     * Removes a `handler` from event `type`. If no handler is provided, all handlers will be removed from the event
     *
     * @returns The object that dispatched the event
     */
-   abstract off<K extends keyof (EventMap)>(type: K, handler?: (event: (EventMap)[K]) => void): this;
+   abstract off<K extends keyof EventMap>(type: K, handler?: (event: EventMap[K]) => void): this;
    /**
     * @hidden
     */
    onAux(type: string, message: string, handler: (event: Event) => void): EventDispatcher {
-       const arrowHandler = event => {
+       const arrowHandler = (event) => {
            if (event) {
                logger.info(message, event);
            } else {
@ -79,7 +78,7 @@ export abstract class EventDispatcher {
     * @hidden
     */
    onceAux(type: string, message: string, handler: (event: Event) => void): EventDispatcher {
-       const arrowHandler = event => {
+       const arrowHandler = (event) => {
            if (event) {
                logger.info(message, event);
            } else {
@ -110,5 +109,4 @@ export abstract class EventDispatcher {
        }
        return this;
    }
}

@ -32,7 +32,6 @@ const logger: OpenViduLogger = OpenViduLogger.getInstance();
* Video/audio filter applied to a Stream. See [[Stream.applyFilter]] * Video/audio filter applied to a Stream. See [[Stream.applyFilter]]
*/ */
export class Filter { export class Filter {
/** /**
* Type of filter applied. This is the name of the remote class identifying the filter to apply in Kurento Media Server. * Type of filter applied. This is the name of the remote class identifying the filter to apply in Kurento Media Server.
* For example: `"FaceOverlayFilter"`, `"GStreamerFilter"`. * For example: `"FaceOverlayFilter"`, `"GStreamerFilter"`.
@ -59,7 +58,8 @@ export class Filter {
* You can use this value to know the current status of any applied filter * You can use this value to know the current status of any applied filter
*/ */
lastExecMethod?: { lastExecMethod?: {
method: string, params: Object method: string;
params: Object;
}; };
/** /**
@ -73,7 +73,6 @@ export class Filter {
stream: Stream; stream: Stream;
private logger: OpenViduLogger; private logger: OpenViduLogger;
/** /**
* @hidden * @hidden
*/ */
@ -82,7 +81,6 @@ export class Filter {
this.options = options; this.options = options;
} }
/** /**
* Executes a filter method. Available methods are specific for each filter * Executes a filter method. Available methods are specific for each filter
* *
@ -91,24 +89,40 @@ export class Filter {
*/ */
execMethod(method: string, params: Object): Promise<void> { execMethod(method: string, params: Object): Promise<void> {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
logger.info('Executing filter method to stream ' + this.stream.streamId); logger.info('Executing filter method to stream ' + this.stream.streamId);
let finalParams; let finalParams;
const successExecMethod = triggerEvent => { const successExecMethod = (triggerEvent) => {
logger.info('Filter method successfully executed on Stream ' + this.stream.streamId); logger.info('Filter method successfully executed on Stream ' + this.stream.streamId);
const oldValue = (<any>Object).assign({}, this.stream.filter); const oldValue = (<any>Object).assign({}, this.stream.filter);
this.stream.filter!.lastExecMethod = { method, params: finalParams }; this.stream.filter!.lastExecMethod = { method, params: finalParams };
if (triggerEvent) { if (triggerEvent) {
this.stream.session.emitEvent('streamPropertyChanged', [new StreamPropertyChangedEvent(this.stream.session, this.stream, 'filter', this.stream.filter!, oldValue, 'execFilterMethod')]); this.stream.session.emitEvent('streamPropertyChanged', [
this.stream.streamManager.emitEvent('streamPropertyChanged', [new StreamPropertyChangedEvent(this.stream.streamManager, this.stream, 'filter', this.stream.filter!, oldValue, 'execFilterMethod')]); new StreamPropertyChangedEvent(
this.stream.session,
this.stream,
'filter',
this.stream.filter!,
oldValue,
'execFilterMethod'
)
]);
this.stream.streamManager.emitEvent('streamPropertyChanged', [
new StreamPropertyChangedEvent(
this.stream.streamManager,
this.stream,
'filter',
this.stream.filter!,
oldValue,
'execFilterMethod'
)
]);
} }
return resolve(); return resolve();
} };
if (this.type.startsWith('VB:')) { if (this.type.startsWith('VB:')) {
if (typeof params === 'string') { if (typeof params === 'string') {
try { try {
params = JSON.parse(params); params = JSON.parse(params);
@ -121,23 +135,31 @@ export class Filter {
if (method === 'update') { if (method === 'update') {
if (!this.stream.virtualBackgroundSinkElements?.VB) { if (!this.stream.virtualBackgroundSinkElements?.VB) {
return reject(new OpenViduError(OpenViduErrorName.VIRTUAL_BACKGROUND_ERROR, 'There is no Virtual Background filter applied')); return reject(
new OpenViduError(OpenViduErrorName.VIRTUAL_BACKGROUND_ERROR, 'There is no Virtual Background filter applied')
);
} else { } else {
this.stream.virtualBackgroundSinkElements.VB.updateValues(params) this.stream.virtualBackgroundSinkElements.VB.updateValues(params)
.then(() => successExecMethod(false)) .then(() => successExecMethod(false))
.catch(error => { .catch((error) => {
if (error.name === OpenViduErrorName.VIRTUAL_BACKGROUND_ERROR) { if (error.name === OpenViduErrorName.VIRTUAL_BACKGROUND_ERROR) {
return reject(new OpenViduError(error.name, error.message)); return reject(new OpenViduError(error.name, error.message));
} else { } else {
return reject(new OpenViduError(OpenViduErrorName.VIRTUAL_BACKGROUND_ERROR, 'Error updating values on Virtual Background filter: ' + error)); return reject(
new OpenViduError(
OpenViduErrorName.VIRTUAL_BACKGROUND_ERROR,
'Error updating values on Virtual Background filter: ' + error
)
);
} }
}); });
} }
} else { } else {
return reject(new OpenViduError(OpenViduErrorName.VIRTUAL_BACKGROUND_ERROR, `Unknown Virtual Background method "${method}"`)); return reject(
new OpenViduError(OpenViduErrorName.VIRTUAL_BACKGROUND_ERROR, `Unknown Virtual Background method "${method}"`)
);
} }
} else { } else {
let stringParams; let stringParams;
if (typeof params !== 'string') { if (typeof params !== 'string') {
try { try {
@ -160,7 +182,12 @@ export class Filter {
if (error) { if (error) {
logger.error('Error executing filter method for Stream ' + this.stream.streamId, error); logger.error('Error executing filter method for Stream ' + this.stream.streamId, error);
if (error.code === 401) { if (error.code === 401) {
return reject(new OpenViduError(OpenViduErrorName.OPENVIDU_PERMISSION_DENIED, "You don't have permissions to execute a filter method")); return reject(
new OpenViduError(
OpenViduErrorName.OPENVIDU_PERMISSION_DENIED,
"You don't have permissions to execute a filter method"
)
);
} else { } else {
return reject(error); return reject(error);
} }
@ -173,7 +200,6 @@ export class Filter {
}); });
} }
/** /**
* Subscribe to certain filter event. Available events are specific for each filter * Subscribe to certain filter event. Available events are specific for each filter
* *
@ -190,15 +216,25 @@ export class Filter {
{ streamId: this.stream.streamId, eventType }, { streamId: this.stream.streamId, eventType },
(error, response) => { (error, response) => {
if (error) { if (error) {
logger.error('Error adding filter event listener to event ' + eventType + 'for Stream ' + this.stream.streamId, error); logger.error(
'Error adding filter event listener to event ' + eventType + 'for Stream ' + this.stream.streamId,
error
);
if (error.code === 401) { if (error.code === 401) {
return reject(new OpenViduError(OpenViduErrorName.OPENVIDU_PERMISSION_DENIED, "You don't have permissions to add a filter event listener")); return reject(
new OpenViduError(
OpenViduErrorName.OPENVIDU_PERMISSION_DENIED,
"You don't have permissions to add a filter event listener"
)
);
} else { } else {
return reject(error); return reject(error);
} }
} else { } else {
this.handlers.set(eventType, handler); this.handlers.set(eventType, handler);
logger.info('Filter event listener to event ' + eventType + ' successfully applied on Stream ' + this.stream.streamId); logger.info(
'Filter event listener to event ' + eventType + ' successfully applied on Stream ' + this.stream.streamId
);
return resolve(); return resolve();
} }
} }
@ -206,7 +242,6 @@ export class Filter {
}); });
} }
/** /**
* Removes certain filter event listener previously set. * Removes certain filter event listener previously set.
* *
@ -222,20 +257,29 @@ export class Filter {
{ streamId: this.stream.streamId, eventType }, { streamId: this.stream.streamId, eventType },
(error, response) => { (error, response) => {
if (error) { if (error) {
logger.error('Error removing filter event listener to event ' + eventType + 'for Stream ' + this.stream.streamId, error); logger.error(
'Error removing filter event listener to event ' + eventType + 'for Stream ' + this.stream.streamId,
error
);
if (error.code === 401) { if (error.code === 401) {
return reject(new OpenViduError(OpenViduErrorName.OPENVIDU_PERMISSION_DENIED, "You don't have permissions to add a filter event listener")); return reject(
new OpenViduError(
OpenViduErrorName.OPENVIDU_PERMISSION_DENIED,
"You don't have permissions to add a filter event listener"
)
);
} else { } else {
return reject(error); return reject(error);
} }
} else { } else {
this.handlers.delete(eventType); this.handlers.delete(eventType);
logger.info('Filter event listener to event ' + eventType + ' successfully removed on Stream ' + this.stream.streamId); logger.info(
'Filter event listener to event ' + eventType + ' successfully removed on Stream ' + this.stream.streamId
);
return resolve(); return resolve();
} }
} }
); );
}); });
} }
}
}

@ -31,12 +31,10 @@ const logger: OpenViduLogger = OpenViduLogger.getInstance();
*/ */
let platform: PlatformUtils; let platform: PlatformUtils;
/** /**
* Easy recording of [[Stream]] objects straightaway from the browser. Initialized with [[OpenVidu.initLocalRecorder]] method * Easy recording of [[Stream]] objects straightaway from the browser. Initialized with [[OpenVidu.initLocalRecorder]] method
*/ */
export class LocalRecorder { export class LocalRecorder {
state: LocalRecorderState; state: LocalRecorderState;
private connectionId: string; private connectionId: string;
@ -52,18 +50,17 @@ export class LocalRecorder {
*/ */
constructor(private stream: Stream) { constructor(private stream: Stream) {
platform = PlatformUtils.getInstance(); platform = PlatformUtils.getInstance();
this.connectionId = (!!this.stream.connection) ? this.stream.connection.connectionId : 'default-connection'; this.connectionId = !!this.stream.connection ? this.stream.connection.connectionId : 'default-connection';
this.id = this.stream.streamId + '_' + this.connectionId + '_localrecord'; this.id = this.stream.streamId + '_' + this.connectionId + '_localrecord';
this.state = LocalRecorderState.READY; this.state = LocalRecorderState.READY;
} }
/** /**
* Starts the recording of the Stream. [[state]] property must be `READY`. After method succeeds is set to `RECORDING` * Starts the recording of the Stream. [[state]] property must be `READY`. After method succeeds is set to `RECORDING`
* *
* @param options The [MediaRecorder.options](https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder/MediaRecorder#parameters) to be used to record this Stream. * @param options The [MediaRecorder.options](https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder/MediaRecorder#parameters) to be used to record this Stream.
* For example: * For example:
* *
* ```javascript * ```javascript
* var OV = new OpenVidu(); * var OV = new OpenVidu();
* var publisher = await OV.initPublisherAsync(); * var publisher = await OV.initPublisherAsync();
@ -75,7 +72,7 @@ export class LocalRecorder {
* }; * };
* localRecorder.record(options); * localRecorder.record(options);
* ``` * ```
* *
* If not specified, the default options preferred by the platform will be used. * If not specified, the default options preferred by the platform will be used.
* *
* @returns A Promise (to which you can optionally subscribe to) that is resolved if the recording successfully started and rejected with an Error object if not * @returns A Promise (to which you can optionally subscribe to) that is resolved if the recording successfully started and rejected with an Error object if not
@ -84,14 +81,24 @@ export class LocalRecorder {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
try { try {
if (typeof options === 'string' || options instanceof String) { if (typeof options === 'string' || options instanceof String) {
return reject(`When calling LocalRecorder.record(options) parameter 'options' cannot be a string. Must be an object like { mimeType: "${options}" }`); return reject(
`When calling LocalRecorder.record(options) parameter 'options' cannot be a string. Must be an object like { mimeType: "${options}" }`
);
} }
if (typeof MediaRecorder === 'undefined') { if (typeof MediaRecorder === 'undefined') {
logger.error('MediaRecorder not supported on your device. See compatibility in https://caniuse.com/#search=MediaRecorder'); logger.error(
throw (Error('MediaRecorder not supported on your device. See compatibility in https://caniuse.com/#search=MediaRecorder')); 'MediaRecorder not supported on your device. See compatibility in https://caniuse.com/#search=MediaRecorder'
);
throw Error(
'MediaRecorder not supported on your device. See compatibility in https://caniuse.com/#search=MediaRecorder'
);
} }
if (this.state !== LocalRecorderState.READY) { if (this.state !== LocalRecorderState.READY) {
throw (Error('\'LocalRecord.record()\' needs \'LocalRecord.state\' to be \'READY\' (current value: \'' + this.state + '\'). Call \'LocalRecorder.clean()\' or init a new LocalRecorder before')); throw Error(
"'LocalRecord.record()' needs 'LocalRecord.state' to be 'READY' (current value: '" +
this.state +
"'). Call 'LocalRecorder.clean()' or init a new LocalRecorder before"
);
} }
logger.log("Starting local recording of stream '" + this.stream.streamId + "' of connection '" + this.connectionId + "'"); logger.log("Starting local recording of stream '" + this.stream.streamId + "' of connection '" + this.connectionId + "'");
@ -103,7 +110,6 @@ export class LocalRecorder {
this.mediaRecorder = new MediaRecorder(this.stream.getMediaStream(), options); this.mediaRecorder = new MediaRecorder(this.stream.getMediaStream(), options);
this.mediaRecorder.start(); this.mediaRecorder.start();
} catch (err) { } catch (err) {
return reject(err); return reject(err);
} }
@ -136,11 +142,9 @@ export class LocalRecorder {
this.state = LocalRecorderState.RECORDING; this.state = LocalRecorderState.RECORDING;
return resolve(); return resolve();
}); });
} }
/** /**
* Ends the recording of the Stream. [[state]] property must be `RECORDING` or `PAUSED`. After method succeeds is set to `FINISHED` * Ends the recording of the Stream. [[state]] property must be `RECORDING` or `PAUSED`. After method succeeds is set to `FINISHED`
* @returns A Promise (to which you can optionally subscribe to) that is resolved if the recording successfully stopped and rejected with an Error object if not * @returns A Promise (to which you can optionally subscribe to) that is resolved if the recording successfully stopped and rejected with an Error object if not
@ -149,7 +153,11 @@ export class LocalRecorder {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
try { try {
if (this.state === LocalRecorderState.READY || this.state === LocalRecorderState.FINISHED) { if (this.state === LocalRecorderState.READY || this.state === LocalRecorderState.FINISHED) {
throw (Error('\'LocalRecord.stop()\' needs \'LocalRecord.state\' to be \'RECORDING\' or \'PAUSED\' (current value: \'' + this.state + '\'). Call \'LocalRecorder.start()\' before')); throw Error(
"'LocalRecord.stop()' needs 'LocalRecord.state' to be 'RECORDING' or 'PAUSED' (current value: '" +
this.state +
"'). Call 'LocalRecorder.start()' before"
);
} }
this.mediaRecorder.onstop = () => { this.mediaRecorder.onstop = () => {
this.onStopDefault(); this.onStopDefault();
@ -162,7 +170,6 @@ export class LocalRecorder {
}); });
} }
/** /**
* Pauses the recording of the Stream. [[state]] property must be `RECORDING`. After method succeeds is set to `PAUSED` * Pauses the recording of the Stream. [[state]] property must be `RECORDING`. After method succeeds is set to `PAUSED`
* @returns A Promise (to which you can optionally subscribe to) that is resolved if the recording was successfully paused and rejected with an Error object if not * @returns A Promise (to which you can optionally subscribe to) that is resolved if the recording was successfully paused and rejected with an Error object if not
@ -171,7 +178,13 @@ export class LocalRecorder {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
try { try {
if (this.state !== LocalRecorderState.RECORDING) { if (this.state !== LocalRecorderState.RECORDING) {
return reject(Error('\'LocalRecord.pause()\' needs \'LocalRecord.state\' to be \'RECORDING\' (current value: \'' + this.state + '\'). Call \'LocalRecorder.start()\' or \'LocalRecorder.resume()\' before')); return reject(
Error(
"'LocalRecord.pause()' needs 'LocalRecord.state' to be 'RECORDING' (current value: '" +
this.state +
"'). Call 'LocalRecorder.start()' or 'LocalRecorder.resume()' before"
)
);
} }
this.mediaRecorder.pause(); this.mediaRecorder.pause();
this.state = LocalRecorderState.PAUSED; this.state = LocalRecorderState.PAUSED;
@ -190,7 +203,11 @@ export class LocalRecorder {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
try { try {
if (this.state !== LocalRecorderState.PAUSED) { if (this.state !== LocalRecorderState.PAUSED) {
throw (Error('\'LocalRecord.resume()\' needs \'LocalRecord.state\' to be \'PAUSED\' (current value: \'' + this.state + '\'). Call \'LocalRecorder.pause()\' before')); throw Error(
"'LocalRecord.resume()' needs 'LocalRecord.state' to be 'PAUSED' (current value: '" +
this.state +
"'). Call 'LocalRecorder.pause()' before"
);
} }
this.mediaRecorder.resume(); this.mediaRecorder.resume();
this.state = LocalRecorderState.RECORDING; this.state = LocalRecorderState.RECORDING;
@ -201,14 +218,16 @@ export class LocalRecorder {
}); });
} }
/** /**
* Previews the recording, appending a new HTMLVideoElement to element with id `parentId`. [[state]] property must be `FINISHED` * Previews the recording, appending a new HTMLVideoElement to element with id `parentId`. [[state]] property must be `FINISHED`
*/ */
preview(parentElement): HTMLVideoElement { preview(parentElement): HTMLVideoElement {
if (this.state !== LocalRecorderState.FINISHED) { if (this.state !== LocalRecorderState.FINISHED) {
throw (Error('\'LocalRecord.preview()\' needs \'LocalRecord.state\' to be \'FINISHED\' (current value: \'' + this.state + '\'). Call \'LocalRecorder.stop()\' before')); throw Error(
"'LocalRecord.preview()' needs 'LocalRecord.state' to be 'FINISHED' (current value: '" +
this.state +
"'). Call 'LocalRecorder.stop()' before"
);
} }
this.videoPreview = document.createElement('video'); this.videoPreview = document.createElement('video');
@ -234,7 +253,6 @@ export class LocalRecorder {
return this.videoPreview; return this.videoPreview;
} }
/** /**
* Gracefully stops and cleans the current recording (WARNING: it is completely dismissed). Sets [[state]] to `READY` so the recording can start again * Gracefully stops and cleans the current recording (WARNING: it is completely dismissed). Sets [[state]] to `READY` so the recording can start again
*/ */
@ -245,19 +263,24 @@ export class LocalRecorder {
this.state = LocalRecorderState.READY; this.state = LocalRecorderState.READY;
}; };
if (this.state === LocalRecorderState.RECORDING || this.state === LocalRecorderState.PAUSED) { if (this.state === LocalRecorderState.RECORDING || this.state === LocalRecorderState.PAUSED) {
this.stop().then(() => f()).catch(() => f()); this.stop()
.then(() => f())
.catch(() => f());
} else { } else {
f(); f();
} }
} }
/** /**
* Downloads the recorded video through the browser. [[state]] property must be `FINISHED` * Downloads the recorded video through the browser. [[state]] property must be `FINISHED`
*/ */
download(): void { download(): void {
if (this.state !== LocalRecorderState.FINISHED) { if (this.state !== LocalRecorderState.FINISHED) {
throw (Error('\'LocalRecord.download()\' needs \'LocalRecord.state\' to be \'FINISHED\' (current value: \'' + this.state + '\'). Call \'LocalRecorder.stop()\' before')); throw Error(
"'LocalRecord.download()' needs 'LocalRecord.state' to be 'FINISHED' (current value: '" +
this.state +
"'). Call 'LocalRecorder.stop()' before"
);
} else { } else {
const a: HTMLAnchorElement = document.createElement('a'); const a: HTMLAnchorElement = document.createElement('a');
a.style.display = 'none'; a.style.display = 'none';
@ -278,13 +301,12 @@ export class LocalRecorder {
*/ */
getBlob(): Blob { getBlob(): Blob {
if (this.state !== LocalRecorderState.FINISHED) { if (this.state !== LocalRecorderState.FINISHED) {
throw (Error('Call \'LocalRecord.stop()\' before getting Blob file')); throw Error("Call 'LocalRecord.stop()' before getting Blob file");
} else { } else {
return this.blob!; return this.blob!;
} }
} }
/** /**
* Uploads the recorded video as a binary file performing an HTTP/POST operation to URL `endpoint`. [[state]] property must be `FINISHED`. Optional HTTP headers can be passed as second parameter. For example: * Uploads the recorded video as a binary file performing an HTTP/POST operation to URL `endpoint`. [[state]] property must be `FINISHED`. Optional HTTP headers can be passed as second parameter. For example:
* ``` * ```
@ -298,7 +320,13 @@ export class LocalRecorder {
uploadAsBinary(endpoint: string, headers?: any): Promise<any> { uploadAsBinary(endpoint: string, headers?: any): Promise<any> {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
if (this.state !== LocalRecorderState.FINISHED) { if (this.state !== LocalRecorderState.FINISHED) {
return reject(Error('\'LocalRecord.uploadAsBinary()\' needs \'LocalRecord.state\' to be \'FINISHED\' (current value: \'' + this.state + '\'). Call \'LocalRecorder.stop()\' before')); return reject(
Error(
"'LocalRecord.uploadAsBinary()' needs 'LocalRecord.state' to be 'FINISHED' (current value: '" +
this.state +
"'). Call 'LocalRecorder.stop()' before"
)
);
} else { } else {
const http = new XMLHttpRequest(); const http = new XMLHttpRequest();
http.open('POST', endpoint, true); http.open('POST', endpoint, true);
@ -324,7 +352,6 @@ export class LocalRecorder {
}); });
} }
/** /**
* Uploads the recorded video as a multipart file performing an HTTP/POST operation to URL `endpoint`. [[state]] property must be `FINISHED`. Optional HTTP headers can be passed as second parameter. For example: * Uploads the recorded video as a multipart file performing an HTTP/POST operation to URL `endpoint`. [[state]] property must be `FINISHED`. Optional HTTP headers can be passed as second parameter. For example:
* ``` * ```
@ -338,7 +365,13 @@ export class LocalRecorder {
uploadAsMultipartfile(endpoint: string, headers?: any): Promise<any> { uploadAsMultipartfile(endpoint: string, headers?: any): Promise<any> {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
if (this.state !== LocalRecorderState.FINISHED) { if (this.state !== LocalRecorderState.FINISHED) {
return reject(Error('\'LocalRecord.uploadAsMultipartfile()\' needs \'LocalRecord.state\' to be \'FINISHED\' (current value: \'' + this.state + '\'). Call \'LocalRecorder.stop()\' before')); return reject(
Error(
"'LocalRecord.uploadAsMultipartfile()' needs 'LocalRecord.state' to be 'FINISHED' (current value: '" +
this.state +
"'). Call 'LocalRecorder.stop()' before"
)
);
} else { } else {
const http = new XMLHttpRequest(); const http = new XMLHttpRequest();
http.open('POST', endpoint, true); http.open('POST', endpoint, true);
@ -368,7 +401,6 @@ export class LocalRecorder {
}); });
} }
/* Private methods */ /* Private methods */
private onStopDefault(): void { private onStopDefault(): void {
@ -381,5 +413,4 @@ export class LocalRecorder {
this.state = LocalRecorderState.FINISHED; this.state = LocalRecorderState.FINISHED;
} }
} }

File diff suppressed because it is too large

@ -45,7 +45,6 @@ let platform: PlatformUtils;
* See available event listeners at [[PublisherEventMap]]. * See available event listeners at [[PublisherEventMap]].
*/ */
export class Publisher extends StreamManager { export class Publisher extends StreamManager {
/** /**
* Whether the Publisher has been granted access to the requested input devices or not * Whether the Publisher has been granted access to the requested input devices or not
*/ */
@ -82,7 +81,13 @@ export class Publisher extends StreamManager {
* @hidden * @hidden
*/ */
constructor(targEl: string | HTMLElement | undefined, properties: PublisherProperties, openvidu: OpenVidu) { constructor(targEl: string | HTMLElement | undefined, properties: PublisherProperties, openvidu: OpenVidu) {
super(new Stream((!!openvidu.session) ? openvidu.session : new Session(openvidu), { publisherProperties: properties, mediaConstraints: {} }), targEl); super(
new Stream(!!openvidu.session ? openvidu.session : new Session(openvidu), {
publisherProperties: properties,
mediaConstraints: {}
}),
targEl
);
platform = PlatformUtils.getInstance(); platform = PlatformUtils.getInstance();
this.properties = properties; this.properties = properties;
this.openvidu = openvidu; this.openvidu = openvidu;
@ -95,7 +100,6 @@ export class Publisher extends StreamManager {
}); });
} }
/** /**
* Publish or unpublish the audio stream (if available). Calling this method twice in a row passing same `enabled` value will have no effect * Publish or unpublish the audio stream (if available). Calling this method twice in a row passing same `enabled` value will have no effect
* *
@ -115,7 +119,9 @@ export class Publisher extends StreamManager {
*/ */
publishAudio(enabled: boolean): void { publishAudio(enabled: boolean): void {
if (this.stream.audioActive !== enabled) { if (this.stream.audioActive !== enabled) {
const affectedMediaStream: MediaStream = this.stream.displayMyRemote() ? this.stream.localMediaStreamWhenSubscribedToRemote! : this.stream.getMediaStream(); const affectedMediaStream: MediaStream = this.stream.displayMyRemote()
? this.stream.localMediaStreamWhenSubscribedToRemote!
: this.stream.getMediaStream();
affectedMediaStream.getAudioTracks().forEach((track) => { affectedMediaStream.getAudioTracks().forEach((track) => {
track.enabled = enabled; track.enabled = enabled;
}); });
@ -132,18 +138,22 @@ export class Publisher extends StreamManager {
if (error) { if (error) {
logger.error("Error sending 'streamPropertyChanged' event", error); logger.error("Error sending 'streamPropertyChanged' event", error);
} else { } else {
this.session.emitEvent('streamPropertyChanged', [new StreamPropertyChangedEvent(this.session, this.stream, 'audioActive', enabled, !enabled, 'publishAudio')]); this.session.emitEvent('streamPropertyChanged', [
this.emitEvent('streamPropertyChanged', [new StreamPropertyChangedEvent(this, this.stream, 'audioActive', enabled, !enabled, 'publishAudio')]); new StreamPropertyChangedEvent(this.session, this.stream, 'audioActive', enabled, !enabled, 'publishAudio')
]);
this.emitEvent('streamPropertyChanged', [
new StreamPropertyChangedEvent(this, this.stream, 'audioActive', enabled, !enabled, 'publishAudio')
]);
this.session.sendVideoData(this.stream.streamManager); this.session.sendVideoData(this.stream.streamManager);
} }
}); }
);
} }
this.stream.audioActive = enabled; this.stream.audioActive = enabled;
logger.info("'Publisher' has " + (enabled ? 'published' : 'unpublished') + ' its audio stream'); logger.info("'Publisher' has " + (enabled ? 'published' : 'unpublished') + ' its audio stream');
} }
} }
/** /**
* Publish or unpublish the video stream (if available). Calling this method twice in a row passing same `enabled` value will have no effect * Publish or unpublish the video stream (if available). Calling this method twice in a row passing same `enabled` value will have no effect
* *
@ -169,12 +179,11 @@ export class Publisher extends StreamManager {
* will be used instead. * will be used instead.
*/ */
publishVideo<T extends boolean>(enabled: T, resource?: T extends false ? boolean : MediaStreamTrack): Promise<void> { publishVideo<T extends boolean>(enabled: T, resource?: T extends false ? boolean : MediaStreamTrack): Promise<void> {
return new Promise(async (resolve, reject) => { return new Promise(async (resolve, reject) => {
if (this.stream.videoActive !== enabled) { if (this.stream.videoActive !== enabled) {
const affectedMediaStream: MediaStream = this.stream.displayMyRemote()
const affectedMediaStream: MediaStream = this.stream.displayMyRemote() ? this.stream.localMediaStreamWhenSubscribedToRemote! : this.stream.getMediaStream(); ? this.stream.localMediaStreamWhenSubscribedToRemote!
: this.stream.getMediaStream();
let mustRestartMediaStream = false; let mustRestartMediaStream = false;
affectedMediaStream.getVideoTracks().forEach((track) => { affectedMediaStream.getVideoTracks().forEach((track) => {
track.enabled = enabled; track.enabled = enabled;
@ -212,13 +221,16 @@ export class Publisher extends StreamManager {
delete this.stream.lastVBFilter; delete this.stream.lastVBFilter;
}, 1); }, 1);
} }
} };
if (!!resource && resource instanceof MediaStreamTrack) { if (!!resource && resource instanceof MediaStreamTrack) {
await replaceVideoTrack(resource); await replaceVideoTrack(resource);
} else { } else {
try { try {
const mediaStream = await navigator.mediaDevices.getUserMedia({ audio: false, video: this.stream.lastVideoTrackConstraints }); const mediaStream = await navigator.mediaDevices.getUserMedia({
audio: false,
video: this.stream.lastVideoTrackConstraints
});
await replaceVideoTrack(mediaStream.getVideoTracks()[0]); await replaceVideoTrack(mediaStream.getVideoTracks()[0]);
} catch (error) { } catch (error) {
return reject(error); return reject(error);
@ -239,11 +251,23 @@ export class Publisher extends StreamManager {
if (error) { if (error) {
logger.error("Error sending 'streamPropertyChanged' event", error); logger.error("Error sending 'streamPropertyChanged' event", error);
} else { } else {
this.session.emitEvent('streamPropertyChanged', [new StreamPropertyChangedEvent(this.session, this.stream, 'videoActive', enabled, !enabled, 'publishVideo')]); this.session.emitEvent('streamPropertyChanged', [
this.emitEvent('streamPropertyChanged', [new StreamPropertyChangedEvent(this, this.stream, 'videoActive', enabled, !enabled, 'publishVideo')]); new StreamPropertyChangedEvent(
this.session,
this.stream,
'videoActive',
enabled,
!enabled,
'publishVideo'
)
]);
this.emitEvent('streamPropertyChanged', [
new StreamPropertyChangedEvent(this, this.stream, 'videoActive', enabled, !enabled, 'publishVideo')
]);
this.session.sendVideoData(this.stream.streamManager); this.session.sendVideoData(this.stream.streamManager);
} }
}); }
);
} }
this.stream.videoActive = enabled; this.stream.videoActive = enabled;
logger.info("'Publisher' has " + (enabled ? 'published' : 'unpublished') + ' its video stream'); logger.info("'Publisher' has " + (enabled ? 'published' : 'unpublished') + ' its video stream');
@ -252,22 +276,19 @@ export class Publisher extends StreamManager {
}); });
} }
/** /**
* Call this method before [[Session.publish]] if you prefer to subscribe to your Publisher's remote stream instead of using the local stream, as any other user would do. * Call this method before [[Session.publish]] if you prefer to subscribe to your Publisher's remote stream instead of using the local stream, as any other user would do.
*/ */
subscribeToRemote(value?: boolean): void { subscribeToRemote(value?: boolean): void {
value = (value !== undefined) ? value : true; value = value !== undefined ? value : true;
this.isSubscribedToRemote = value; this.isSubscribedToRemote = value;
this.stream.subscribeToMyRemote(value); this.stream.subscribeToMyRemote(value);
} }
/** /**
* See [[EventDispatcher.on]] * See [[EventDispatcher.on]]
*/ */
on<K extends keyof PublisherEventMap>(type: K, handler: (event: PublisherEventMap[K]) => void): this { on<K extends keyof PublisherEventMap>(type: K, handler: (event: PublisherEventMap[K]) => void): this {
super.on(<any>type, handler); super.on(<any>type, handler);
if (type === 'streamCreated') { if (type === 'streamCreated') {
@ -292,12 +313,10 @@ export class Publisher extends StreamManager {
return this; return this;
} }
/** /**
* See [[EventDispatcher.once]] * See [[EventDispatcher.once]]
*/ */
once<K extends keyof PublisherEventMap>(type: K, handler: (event: PublisherEventMap[K]) => void): this { once<K extends keyof PublisherEventMap>(type: K, handler: (event: PublisherEventMap[K]) => void): this {
super.once(<any>type, handler); super.once(<any>type, handler);
if (type === 'streamCreated') { if (type === 'streamCreated') {
@ -322,7 +341,6 @@ export class Publisher extends StreamManager {
return this; return this;
} }
/** /**
* See [[EventDispatcher.off]] * See [[EventDispatcher.off]]
*/ */
@ -331,7 +349,6 @@ export class Publisher extends StreamManager {
return this; return this;
} }
/** /**
* Replaces the current video or audio track with a different one. This allows you to replace an ongoing track with a different one * Replaces the current video or audio track with a different one. This allows you to replace an ongoing track with a different one
* without having to renegotiate the whole WebRTC connection (that is, initializing a new Publisher, unpublishing the previous one * without having to renegotiate the whole WebRTC connection (that is, initializing a new Publisher, unpublishing the previous one
@ -359,7 +376,6 @@ export class Publisher extends StreamManager {
*/ */
initialize(): Promise<void> { initialize(): Promise<void> {
return new Promise(async (resolve, reject) => { return new Promise(async (resolve, reject) => {
let constraints: MediaStreamConstraints = {}; let constraints: MediaStreamConstraints = {};
let constraintsAux: MediaStreamConstraints = {}; let constraintsAux: MediaStreamConstraints = {};
const timeForDialogEvent = 2000; const timeForDialogEvent = 2000;
@ -368,7 +384,7 @@ export class Publisher extends StreamManager {
const errorCallback = (openViduError: OpenViduError) => { const errorCallback = (openViduError: OpenViduError) => {
this.accessDenied = true; this.accessDenied = true;
this.accessAllowed = false; this.accessAllowed = false;
logger.error(`Publisher initialization failed. ${openViduError.name}: ${openViduError.message}`) logger.error(`Publisher initialization failed. ${openViduError.name}: ${openViduError.message}`);
return reject(openViduError); return reject(openViduError);
}; };
@ -378,21 +394,27 @@ export class Publisher extends StreamManager {
if (typeof MediaStreamTrack !== 'undefined' && this.properties.audioSource instanceof MediaStreamTrack) { if (typeof MediaStreamTrack !== 'undefined' && this.properties.audioSource instanceof MediaStreamTrack) {
mediaStream.removeTrack(mediaStream.getAudioTracks()[0]); mediaStream.removeTrack(mediaStream.getAudioTracks()[0]);
mediaStream.addTrack((<MediaStreamTrack>this.properties.audioSource)); mediaStream.addTrack(<MediaStreamTrack>this.properties.audioSource);
} }
if (typeof MediaStreamTrack !== 'undefined' && this.properties.videoSource instanceof MediaStreamTrack) { if (typeof MediaStreamTrack !== 'undefined' && this.properties.videoSource instanceof MediaStreamTrack) {
mediaStream.removeTrack(mediaStream.getVideoTracks()[0]); mediaStream.removeTrack(mediaStream.getVideoTracks()[0]);
mediaStream.addTrack((<MediaStreamTrack>this.properties.videoSource)); mediaStream.addTrack(<MediaStreamTrack>this.properties.videoSource);
} }
// Apply PublisherProperties.publishAudio and PublisherProperties.publishVideo // Apply PublisherProperties.publishAudio and PublisherProperties.publishVideo
if (!!mediaStream.getAudioTracks()[0]) { if (!!mediaStream.getAudioTracks()[0]) {
const enabled = (this.stream.audioActive !== undefined && this.stream.audioActive !== null) ? this.stream.audioActive : !!this.stream.outboundStreamOpts.publisherProperties.publishAudio; const enabled =
this.stream.audioActive !== undefined && this.stream.audioActive !== null
? this.stream.audioActive
: !!this.stream.outboundStreamOpts.publisherProperties.publishAudio;
mediaStream.getAudioTracks()[0].enabled = enabled; mediaStream.getAudioTracks()[0].enabled = enabled;
} }
if (!!mediaStream.getVideoTracks()[0]) { if (!!mediaStream.getVideoTracks()[0]) {
const enabled = (this.stream.videoActive !== undefined && this.stream.videoActive !== null) ? this.stream.videoActive : !!this.stream.outboundStreamOpts.publisherProperties.publishVideo; const enabled =
this.stream.videoActive !== undefined && this.stream.videoActive !== null
? this.stream.videoActive
: !!this.stream.outboundStreamOpts.publisherProperties.publishVideo;
mediaStream.getVideoTracks()[0].enabled = enabled; mediaStream.getVideoTracks()[0].enabled = enabled;
} }
@ -411,16 +433,16 @@ export class Publisher extends StreamManager {
// https://w3c.github.io/mst-content-hint/#video-content-hints // https://w3c.github.io/mst-content-hint/#video-content-hints
switch (this.stream.typeOfVideo) { switch (this.stream.typeOfVideo) {
case TypeOfVideo.SCREEN: case TypeOfVideo.SCREEN:
track.contentHint = "detail"; track.contentHint = 'detail';
break; break;
case TypeOfVideo.CUSTOM: case TypeOfVideo.CUSTOM:
logger.warn("CUSTOM type video track was provided without Content Hint!"); logger.warn('CUSTOM type video track was provided without Content Hint!');
track.contentHint = "motion"; track.contentHint = 'motion';
break; break;
case TypeOfVideo.CAMERA: case TypeOfVideo.CAMERA:
case TypeOfVideo.IPCAM: case TypeOfVideo.IPCAM:
default: default:
track.contentHint = "motion"; track.contentHint = 'motion';
break; break;
} }
logger.info(`Video track Content Hint set: '${track.contentHint}'`); logger.info(`Video track Content Hint set: '${track.contentHint}'`);
@ -438,7 +460,7 @@ export class Publisher extends StreamManager {
if (this.stream.isSendVideo()) { if (this.stream.isSendVideo()) {
// Has video track // Has video track
this.getVideoDimensions().then(dimensions => { this.getVideoDimensions().then((dimensions) => {
this.stream.videoDimensions = { this.stream.videoDimensions = {
width: dimensions.width, width: dimensions.width,
height: dimensions.height height: dimensions.height
@ -491,7 +513,6 @@ export class Publisher extends StreamManager {
this.clearPermissionDialogTimer(startTime, timeForDialogEvent); this.clearPermissionDialogTimer(startTime, timeForDialogEvent);
mediaStream.addTrack(audioOnlyStream.getAudioTracks()[0]); mediaStream.addTrack(audioOnlyStream.getAudioTracks()[0]);
successCallback(mediaStream); successCallback(mediaStream);
} catch (error) { } catch (error) {
this.clearPermissionDialogTimer(startTime, timeForDialogEvent); this.clearPermissionDialogTimer(startTime, timeForDialogEvent);
mediaStream.getAudioTracks().forEach((track) => { mediaStream.getAudioTracks().forEach((track) => {
@ -529,7 +550,6 @@ export class Publisher extends StreamManager {
errorName = OpenViduErrorName.INPUT_AUDIO_DEVICE_NOT_FOUND; errorName = OpenViduErrorName.INPUT_AUDIO_DEVICE_NOT_FOUND;
errorMessage = error.toString(); errorMessage = error.toString();
errorCallback(new OpenViduError(errorName, errorMessage)); errorCallback(new OpenViduError(errorName, errorMessage));
} catch (error) { } catch (error) {
errorName = OpenViduErrorName.INPUT_VIDEO_DEVICE_NOT_FOUND; errorName = OpenViduErrorName.INPUT_VIDEO_DEVICE_NOT_FOUND;
errorMessage = error.toString(); errorMessage = error.toString();
@ -538,12 +558,13 @@ export class Publisher extends StreamManager {
break; break;
case 'notallowederror': case 'notallowederror':
errorName = this.stream.isSendScreen() ? OpenViduErrorName.SCREEN_CAPTURE_DENIED : OpenViduErrorName.DEVICE_ACCESS_DENIED; errorName = this.stream.isSendScreen()
? OpenViduErrorName.SCREEN_CAPTURE_DENIED
: OpenViduErrorName.DEVICE_ACCESS_DENIED;
errorMessage = error.toString(); errorMessage = error.toString();
errorCallback(new OpenViduError(errorName, errorMessage)); errorCallback(new OpenViduError(errorName, errorMessage));
break; break;
case 'overconstrainederror': case 'overconstrainederror':
try { try {
const mediaStream = await navigator.mediaDevices.getUserMedia({ const mediaStream = await navigator.mediaDevices.getUserMedia({
audio: false, audio: false,
@ -554,20 +575,27 @@ export class Publisher extends StreamManager {
}); });
if (error.constraint.toLowerCase() === 'deviceid') { if (error.constraint.toLowerCase() === 'deviceid') {
errorName = OpenViduErrorName.INPUT_AUDIO_DEVICE_NOT_FOUND; errorName = OpenViduErrorName.INPUT_AUDIO_DEVICE_NOT_FOUND;
errorMessage = "Audio input device with deviceId '" + (<ConstrainDOMStringParameters>(<MediaTrackConstraints>constraints.audio).deviceId!!).exact + "' not found"; errorMessage =
"Audio input device with deviceId '" +
(<ConstrainDOMStringParameters>(<MediaTrackConstraints>constraints.audio).deviceId!!).exact +
"' not found";
} else { } else {
errorName = OpenViduErrorName.PUBLISHER_PROPERTIES_ERROR; errorName = OpenViduErrorName.PUBLISHER_PROPERTIES_ERROR;
errorMessage = "Audio input device doesn't support the value passed for constraint '" + error.constraint + "'"; errorMessage =
"Audio input device doesn't support the value passed for constraint '" + error.constraint + "'";
} }
errorCallback(new OpenViduError(errorName, errorMessage)); errorCallback(new OpenViduError(errorName, errorMessage));
} catch (error) { } catch (error) {
if (error.constraint.toLowerCase() === 'deviceid') { if (error.constraint.toLowerCase() === 'deviceid') {
errorName = OpenViduErrorName.INPUT_VIDEO_DEVICE_NOT_FOUND; errorName = OpenViduErrorName.INPUT_VIDEO_DEVICE_NOT_FOUND;
errorMessage = "Video input device with deviceId '" + (<ConstrainDOMStringParameters>(<MediaTrackConstraints>constraints.video).deviceId!!).exact + "' not found"; errorMessage =
"Video input device with deviceId '" +
(<ConstrainDOMStringParameters>(<MediaTrackConstraints>constraints.video).deviceId!!).exact +
"' not found";
} else { } else {
errorName = OpenViduErrorName.PUBLISHER_PROPERTIES_ERROR; errorName = OpenViduErrorName.PUBLISHER_PROPERTIES_ERROR;
errorMessage = "Video input device doesn't support the value passed for constraint '" + error.constraint + "'"; errorMessage =
"Video input device doesn't support the value passed for constraint '" + error.constraint + "'";
} }
errorCallback(new OpenViduError(errorName, errorMessage)); errorCallback(new OpenViduError(errorName, errorMessage));
} }
@ -585,13 +613,15 @@ export class Publisher extends StreamManager {
errorCallback(new OpenViduError(errorName, errorMessage)); errorCallback(new OpenViduError(errorName, errorMessage));
break; break;
} }
} };
try { try {
const myConstraints = await this.openvidu.generateMediaConstraints(this.properties); const myConstraints = await this.openvidu.generateMediaConstraints(this.properties);
if (!!myConstraints.videoTrack && !!myConstraints.audioTrack || if (
!!myConstraints.audioTrack && myConstraints.constraints?.video === false || (!!myConstraints.videoTrack && !!myConstraints.audioTrack) ||
!!myConstraints.videoTrack && myConstraints.constraints?.audio === false) { (!!myConstraints.audioTrack && myConstraints.constraints?.video === false) ||
(!!myConstraints.videoTrack && myConstraints.constraints?.audio === false)
) {
// No need to call getUserMedia at all. MediaStreamTracks already provided // No need to call getUserMedia at all. MediaStreamTracks already provided
successCallback(this.openvidu.addAlreadyProvidedTracks(myConstraints, new MediaStream(), this.stream)); successCallback(this.openvidu.addAlreadyProvidedTracks(myConstraints, new MediaStream(), this.stream));
} else { } else {
@ -603,7 +633,7 @@ export class Publisher extends StreamManager {
}; };
this.stream.setOutboundStreamOptions(outboundStreamOptions); this.stream.setOutboundStreamOptions(outboundStreamOptions);
const definedAudioConstraint = ((constraints.audio === undefined) ? true : constraints.audio); const definedAudioConstraint = constraints.audio === undefined ? true : constraints.audio;
constraintsAux.audio = this.stream.isSendScreen() ? false : definedAudioConstraint; constraintsAux.audio = this.stream.isSendScreen() ? false : definedAudioConstraint;
constraintsAux.video = constraints.video; constraintsAux.video = constraints.video;
startTime = Date.now(); startTime = Date.now();
@ -664,9 +694,8 @@ export class Publisher extends StreamManager {
* and then try to use MediaStreamTrack.getSettingsMethod(). If not available, then we * and then try to use MediaStreamTrack.getSettingsMethod(). If not available, then we
* use the HTMLVideoElement properties videoWidth and videoHeight * use the HTMLVideoElement properties videoWidth and videoHeight
*/ */
getVideoDimensions(): Promise<{ width: number, height: number }> { getVideoDimensions(): Promise<{ width: number; height: number }> {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
// Ionic iOS and Safari iOS supposedly require the video element to actually exist inside the DOM // Ionic iOS and Safari iOS supposedly require the video element to actually exist inside the DOM
const requiresDomInsertion: boolean = platform.isIonicIos() || platform.isIOSWithSafari(); const requiresDomInsertion: boolean = platform.isIonicIos() || platform.isIOSWithSafari();
@ -692,7 +721,7 @@ export class Publisher extends StreamManager {
} }
return resolve({ width, height }); return resolve({ width, height });
} };
if (this.videoReference.readyState >= 1) { if (this.videoReference.readyState >= 1) {
// The video already has metadata available // The video already has metadata available
@ -739,7 +768,14 @@ export class Publisher extends StreamManager {
this.videoReference.muted = true; this.videoReference.muted = true;
this.videoReference.autoplay = true; this.videoReference.autoplay = true;
this.videoReference.controls = false; this.videoReference.controls = false;
if (platform.isSafariBrowser() || (platform.isIPhoneOrIPad() && (platform.isChromeMobileBrowser() || platform.isEdgeMobileBrowser() || platform.isOperaMobileBrowser() || platform.isFirefoxMobileBrowser()))) { if (
platform.isSafariBrowser() ||
(platform.isIPhoneOrIPad() &&
(platform.isChromeMobileBrowser() ||
platform.isEdgeMobileBrowser() ||
platform.isOperaMobileBrowser() ||
platform.isFirefoxMobileBrowser()))
) {
this.videoReference.playsInline = true; this.videoReference.playsInline = true;
} }
this.stream.setMediaStream(mediaStream); this.stream.setMediaStream(mediaStream);
@ -753,7 +789,9 @@ export class Publisher extends StreamManager {
* @hidden * @hidden
*/ */
replaceTrackInMediaStream(track: MediaStreamTrack, updateLastConstraints: boolean): void { replaceTrackInMediaStream(track: MediaStreamTrack, updateLastConstraints: boolean): void {
const mediaStream: MediaStream = this.stream.displayMyRemote() ? this.stream.localMediaStreamWhenSubscribedToRemote! : this.stream.getMediaStream(); const mediaStream: MediaStream = this.stream.displayMyRemote()
? this.stream.localMediaStreamWhenSubscribedToRemote!
: this.stream.getMediaStream();
let removedTrack: MediaStreamTrack; let removedTrack: MediaStreamTrack;
if (track.kind === 'video') { if (track.kind === 'video') {
removedTrack = mediaStream.getVideoTracks()[0]; removedTrack = mediaStream.getVideoTracks()[0];
@ -773,12 +811,12 @@ export class Publisher extends StreamManager {
}; };
if (track.kind === 'video' && updateLastConstraints) { if (track.kind === 'video' && updateLastConstraints) {
this.openvidu.sendNewVideoDimensionsIfRequired(this, 'trackReplaced', 50, 30); this.openvidu.sendNewVideoDimensionsIfRequired(this, 'trackReplaced', 50, 30);
this.openvidu.sendTrackChangedEvent(this,'trackReplaced', trackInfo.oldLabel, trackInfo.newLabel, 'videoActive'); this.openvidu.sendTrackChangedEvent(this, 'trackReplaced', trackInfo.oldLabel, trackInfo.newLabel, 'videoActive');
if(this.stream.isLocalStreamPublished) { if (this.stream.isLocalStreamPublished) {
this.session.sendVideoData(this.stream.streamManager, 5, true, 5); this.session.sendVideoData(this.stream.streamManager, 5, true, 5);
} }
} else if(track.kind === 'audio' && updateLastConstraints) { } else if (track.kind === 'audio' && updateLastConstraints) {
this.openvidu.sendTrackChangedEvent(this,'trackReplaced', trackInfo.oldLabel, trackInfo.newLabel, 'audioActive'); this.openvidu.sendTrackChangedEvent(this, 'trackReplaced', trackInfo.oldLabel, trackInfo.newLabel, 'audioActive');
} }
if (track.kind === 'audio') { if (track.kind === 'audio') {
this.stream.disableHarkSpeakingEvent(false); this.stream.disableHarkSpeakingEvent(false);
@ -798,7 +836,7 @@ export class Publisher extends StreamManager {
private clearPermissionDialogTimer(startTime: number, waitTime: number): void { private clearPermissionDialogTimer(startTime: number, waitTime: number): void {
clearTimeout(this.permissionDialogTimeout); clearTimeout(this.permissionDialogTimeout);
if ((Date.now() - startTime) > waitTime) { if (Date.now() - startTime > waitTime) {
// Permission dialog was shown and now is closed // Permission dialog was shown and now is closed
this.emitEvent('accessDialogClosed', []); this.emitEvent('accessDialogClosed', []);
} }
@ -808,19 +846,18 @@ export class Publisher extends StreamManager {
const senders: RTCRtpSender[] = this.stream.getRTCPeerConnection().getSenders(); const senders: RTCRtpSender[] = this.stream.getRTCPeerConnection().getSenders();
let sender: RTCRtpSender | undefined; let sender: RTCRtpSender | undefined;
if (track.kind === 'video') { if (track.kind === 'video') {
sender = senders.find(s => !!s.track && s.track.kind === 'video'); sender = senders.find((s) => !!s.track && s.track.kind === 'video');
if (!sender) { if (!sender) {
throw new Error('There\'s no replaceable track for that kind of MediaStreamTrack in this Publisher object'); throw new Error("There's no replaceable track for that kind of MediaStreamTrack in this Publisher object");
} }
} else if (track.kind === 'audio') { } else if (track.kind === 'audio') {
sender = senders.find(s => !!s.track && s.track.kind === 'audio'); sender = senders.find((s) => !!s.track && s.track.kind === 'audio');
if (!sender) { if (!sender) {
throw new Error('There\'s no replaceable track for that kind of MediaStreamTrack in this Publisher object'); throw new Error("There's no replaceable track for that kind of MediaStreamTrack in this Publisher object");
} }
} else { } else {
throw new Error('Unknown track kind ' + track.kind); throw new Error('Unknown track kind ' + track.kind);
} }
await (sender as RTCRtpSender).replaceTrack(track); await (sender as RTCRtpSender).replaceTrack(track);
} }
} }
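The sender lookup above is the internal half of track replacement; applications normally go through the public `Publisher.replaceTrack()` method instead. A minimal sketch (the device id is a placeholder and `publisher` is assumed to be an already published Publisher):

```typescript
import { Publisher } from 'openvidu-browser';

// Sketch: swap the published camera track for another device.
// Assumes `publisher` was created with OpenVidu.initPublisher() and is already published.
async function switchCamera(publisher: Publisher, deviceId: string): Promise<void> {
    const newStream = await navigator.mediaDevices.getUserMedia({
        video: { deviceId: { exact: deviceId } },
        audio: false
    });
    // Internally this replaces the track on the RTCRtpSender found as shown above
    await publisher.replaceTrack(newStream.getVideoTracks()[0]);
}
```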

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -48,7 +48,6 @@ let platform: PlatformUtils;
* See available event listeners at [[StreamManagerEventMap]]. * See available event listeners at [[StreamManagerEventMap]].
*/ */
export abstract class StreamManager extends EventDispatcher { export abstract class StreamManager extends EventDispatcher {
/** /**
* The Stream represented in the DOM by the Publisher/Subscriber * The Stream represented in the DOM by the Publisher/Subscriber
*/ */
@ -126,7 +125,14 @@ export abstract class StreamManager extends EventDispatcher {
id: '', id: '',
canplayListenerAdded: false canplayListenerAdded: false
}; };
if (
    platform.isSafariBrowser() ||
    (platform.isIPhoneOrIPad() &&
        (platform.isChromeMobileBrowser() ||
            platform.isEdgeMobileBrowser() ||
            platform.isOperaMobileBrowser() ||
            platform.isFirefoxMobileBrowser()))
) {
this.firstVideoElement.video.playsInline = true; this.firstVideoElement.video.playsInline = true;
} }
this.targetElement = targEl; this.targetElement = targEl;
@ -144,7 +150,6 @@ export abstract class StreamManager extends EventDispatcher {
* See [[EventDispatcher.on]] * See [[EventDispatcher.on]]
*/ */
on<K extends keyof StreamManagerEventMap>(type: K, handler: (event: StreamManagerEventMap[K]) => void): this { on<K extends keyof StreamManagerEventMap>(type: K, handler: (event: StreamManagerEventMap[K]) => void): this {
super.onAux(type, "Event '" + type + "' triggered by '" + (this.remote ? 'Subscriber' : 'Publisher') + "'", handler); super.onAux(type, "Event '" + type + "' triggered by '" + (this.remote ? 'Subscriber' : 'Publisher') + "'", handler);
if (type === 'videoElementCreated') { if (type === 'videoElementCreated') {
@ -154,11 +159,14 @@ export abstract class StreamManager extends EventDispatcher {
} }
} }
if (type === 'streamPlaying') { if (type === 'streamPlaying') {
if (
    this.videos[0] &&
    this.videos[0].video &&
    this.videos[0].video.currentTime > 0 &&
    this.videos[0].video.paused === false &&
    this.videos[0].video.ended === false &&
    this.videos[0].video.readyState === 4
) {
this.ee.emitEvent('streamPlaying', [new StreamManagerEvent(this, 'streamPlaying', undefined)]); this.ee.emitEvent('streamPlaying', [new StreamManagerEvent(this, 'streamPlaying', undefined)]);
} }
} }
@ -180,7 +188,6 @@ export abstract class StreamManager extends EventDispatcher {
* See [[EventDispatcher.once]] * See [[EventDispatcher.once]]
*/ */
once<K extends keyof StreamManagerEventMap>(type: K, handler: (event: StreamManagerEventMap[K]) => void): this { once<K extends keyof StreamManagerEventMap>(type: K, handler: (event: StreamManagerEventMap[K]) => void): this {
super.onceAux(type, "Event '" + type + "' triggered once by '" + (this.remote ? 'Subscriber' : 'Publisher') + "'", handler); super.onceAux(type, "Event '" + type + "' triggered once by '" + (this.remote ? 'Subscriber' : 'Publisher') + "'", handler);
if (type === 'videoElementCreated') { if (type === 'videoElementCreated') {
@ -189,11 +196,14 @@ export abstract class StreamManager extends EventDispatcher {
} }
} }
if (type === 'streamPlaying') { if (type === 'streamPlaying') {
if (
    this.videos[0] &&
    this.videos[0].video &&
    this.videos[0].video.currentTime > 0 &&
    this.videos[0].video.paused === false &&
    this.videos[0].video.ended === false &&
    this.videos[0].video.readyState === 4
) {
this.ee.emitEvent('streamPlaying', [new StreamManagerEvent(this, 'streamPlaying', undefined)]); this.ee.emitEvent('streamPlaying', [new StreamManagerEvent(this, 'streamPlaying', undefined)]);
} }
} }
@ -215,19 +225,20 @@ export abstract class StreamManager extends EventDispatcher {
* See [[EventDispatcher.off]] * See [[EventDispatcher.off]]
*/ */
off<K extends keyof StreamManagerEventMap>(type: K, handler?: (event: StreamManagerEventMap[K]) => void): this { off<K extends keyof StreamManagerEventMap>(type: K, handler?: (event: StreamManagerEventMap[K]) => void): this {
super.offAux(type, handler); super.offAux(type, handler);
if (type === 'publisherStartSpeaking') { if (type === 'publisherStartSpeaking') {
// Both StreamManager and Session can have "publisherStartSpeaking" event listeners // Both StreamManager and Session can have "publisherStartSpeaking" event listeners
const remainingStartSpeakingEventListeners =
    this.ee.getListeners(type).length + this.stream.session.ee.getListeners(type).length;
if (remainingStartSpeakingEventListeners === 0) { if (remainingStartSpeakingEventListeners === 0) {
this.stream.disableHarkSpeakingEvent(false); this.stream.disableHarkSpeakingEvent(false);
} }
} }
if (type === 'publisherStopSpeaking') { if (type === 'publisherStopSpeaking') {
// Both StreamManager and Session can have "publisherStopSpeaking" event listeners // Both StreamManager and Session can have "publisherStopSpeaking" event listeners
const remainingStopSpeakingEventListeners =
    this.ee.getListeners(type).length + this.stream.session.ee.getListeners(type).length;
if (remainingStopSpeakingEventListeners === 0) { if (remainingStopSpeakingEventListeners === 0) {
this.stream.disableHarkStoppedSpeakingEvent(false); this.stream.disableHarkStoppedSpeakingEvent(false);
} }
@ -255,7 +266,6 @@ export abstract class StreamManager extends EventDispatcher {
* Publisher/Subscriber and has been successfully disassociated from that one and properly added to this one. * Publisher/Subscriber and has been successfully disassociated from that one and properly added to this one.
*/ */
addVideoElement(video: HTMLVideoElement): number { addVideoElement(video: HTMLVideoElement): number {
this.initializeVideoProperties(video); this.initializeVideoProperties(video);
if (!this.remote && this.stream.displayMyRemote()) { if (!this.remote && this.stream.displayMyRemote()) {
@ -280,7 +290,7 @@ export abstract class StreamManager extends EventDispatcher {
} }
} }
this.stream.session.streamManagers.forEach(streamManager => { this.stream.session.streamManagers.forEach((streamManager) => {
streamManager.disassociateVideo(video); streamManager.disassociateVideo(video);
}); });
@ -370,12 +380,22 @@ export abstract class StreamManager extends EventDispatcher {
* - `interval`: (number) how frequently the analyser polls the audio stream to check if speaking has started/stopped or audio volume has changed. Default **100** (ms) * - `interval`: (number) how frequently the analyser polls the audio stream to check if speaking has started/stopped or audio volume has changed. Default **100** (ms)
* - `threshold`: (number) the volume at which _publisherStartSpeaking_, _publisherStopSpeaking_ events will be fired. Default **-50** (dB) * - `threshold`: (number) the volume at which _publisherStartSpeaking_, _publisherStopSpeaking_ events will be fired. Default **-50** (dB)
*/ */
updatePublisherSpeakingEventsOptions(publisherSpeakingEventsOptions: { interval?: number, threshold?: number }): void { updatePublisherSpeakingEventsOptions(publisherSpeakingEventsOptions: { interval?: number; threshold?: number }): void {
const currentHarkOptions = !!this.stream.harkOptions
    ? this.stream.harkOptions
    : this.stream.session.openvidu.advancedConfiguration.publisherSpeakingEventsOptions || {};
const newInterval =
    typeof publisherSpeakingEventsOptions.interval === 'number'
        ? publisherSpeakingEventsOptions.interval
        : typeof currentHarkOptions.interval === 'number'
        ? currentHarkOptions.interval
        : 100;
const newThreshold =
    typeof publisherSpeakingEventsOptions.threshold === 'number'
        ? publisherSpeakingEventsOptions.threshold
        : typeof currentHarkOptions.threshold === 'number'
        ? currentHarkOptions.threshold
        : -50;
this.stream.harkOptions = { this.stream.harkOptions = {
interval: newInterval, interval: newInterval,
threshold: newThreshold threshold: newThreshold
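As the reformatted method above shows, per-StreamManager values take precedence over the global `publisherSpeakingEventsOptions`, with 100 ms and -50 dB as the final fallbacks. A usage sketch, assuming `OV` is an `OpenVidu` instance and `publisher` an existing Publisher:

```typescript
// Global defaults applied to every StreamManager created afterwards
OV.setAdvancedConfiguration({
    publisherSpeakingEventsOptions: { interval: 100, threshold: -50 }
});

// Override only for this publisher: poll every 50 ms and fire at -60 dB
publisher.updatePublisherSpeakingEventsOptions({ interval: 50, threshold: -60 });
```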
@ -402,7 +422,14 @@ export abstract class StreamManager extends EventDispatcher {
video.autoplay = true; video.autoplay = true;
video.controls = false; video.controls = false;
if (
    platform.isSafariBrowser() ||
    (platform.isIPhoneOrIPad() &&
        (platform.isChromeMobileBrowser() ||
            platform.isEdgeMobileBrowser() ||
            platform.isOperaMobileBrowser() ||
            platform.isFirefoxMobileBrowser()))
) {
video.playsInline = true; video.playsInline = true;
} }
@ -440,7 +467,7 @@ export abstract class StreamManager extends EventDispatcher {
} }
} }
this.videos.forEach(streamManagerVideo => { this.videos.forEach((streamManagerVideo) => {
// Remove oncanplay event listener (only OpenVidu browser listener, not the user ones) // Remove oncanplay event listener (only OpenVidu browser listener, not the user ones)
if (!!streamManagerVideo.video && !!streamManagerVideo.video.removeEventListener) { if (!!streamManagerVideo.video && !!streamManagerVideo.video.removeEventListener) {
streamManagerVideo.video.removeEventListener('canplay', this.canPlayListener); streamManagerVideo.video.removeEventListener('canplay', this.canPlayListener);
@ -450,12 +477,14 @@ export abstract class StreamManager extends EventDispatcher {
// Only remove from DOM videos created by OpenVidu Browser (those generated by passing a valid targetElement in OpenVidu.initPublisher // Only remove from DOM videos created by OpenVidu Browser (those generated by passing a valid targetElement in OpenVidu.initPublisher
// and Session.subscribe or those created by StreamManager.createVideoElement). All these videos triggered a videoElementCreated event // and Session.subscribe or those created by StreamManager.createVideoElement). All these videos triggered a videoElementCreated event
streamManagerVideo.video.parentNode!.removeChild(streamManagerVideo.video); streamManagerVideo.video.parentNode!.removeChild(streamManagerVideo.video);
this.ee.emitEvent('videoElementDestroyed', [
    new VideoElementEvent(streamManagerVideo.video, this, 'videoElementDestroyed')
]);
} }
// Remove srcObject from the video // Remove srcObject from the video
this.removeSrcObject(streamManagerVideo); this.removeSrcObject(streamManagerVideo);
// Remove from collection of videos every video managed by OpenVidu Browser // Remove from collection of videos every video managed by OpenVidu Browser
this.videos.filter(v => !v.targetElement); this.videos.filter((v) => !v.targetElement);
}); });
} }
@ -480,7 +509,7 @@ export abstract class StreamManager extends EventDispatcher {
* @hidden * @hidden
*/ */
addPlayEventToFirstVideo() { addPlayEventToFirstVideo() {
if ((!!this.videos[0]) && (!!this.videos[0].video) && (!this.videos[0].canplayListenerAdded)) { if (!!this.videos[0] && !!this.videos[0].video && !this.videos[0].canplayListenerAdded) {
this.activateStreamPlayingEventExceptionTimeout(); this.activateStreamPlayingEventExceptionTimeout();
this.videos[0].video.addEventListener('canplay', this.canPlayListener); this.videos[0].video.addEventListener('canplay', this.canPlayListener);
this.videos[0].canplayListenerAdded = true; this.videos[0].canplayListenerAdded = true;
@ -491,7 +520,7 @@ export abstract class StreamManager extends EventDispatcher {
* @hidden * @hidden
*/ */
updateMediaStream(mediaStream: MediaStream) { updateMediaStream(mediaStream: MediaStream) {
this.videos.forEach(streamManagerVideo => { this.videos.forEach((streamManagerVideo) => {
streamManagerVideo.video.srcObject = mediaStream; streamManagerVideo.video.srcObject = mediaStream;
if (platform.isIonicIos()) { if (platform.isIonicIos()) {
// iOS Ionic. LIMITATION: must reinsert the video in the DOM for // iOS Ionic. LIMITATION: must reinsert the video in the DOM for
@ -512,8 +541,8 @@ export abstract class StreamManager extends EventDispatcher {
} }
/** /**
* @hidden * @hidden
*/ */
createVideo(): HTMLVideoElement { createVideo(): HTMLVideoElement {
return document.createElement('video'); return document.createElement('video');
} }
@ -569,9 +598,18 @@ export abstract class StreamManager extends EventDispatcher {
// Trigger ExceptionEvent NO_STREAM_PLAYING_EVENT if after timeout there is no 'canplay' event // Trigger ExceptionEvent NO_STREAM_PLAYING_EVENT if after timeout there is no 'canplay' event
const msTimeout = this.stream.session.openvidu.advancedConfiguration.noStreamPlayingEventExceptionTimeout || 4000; const msTimeout = this.stream.session.openvidu.advancedConfiguration.noStreamPlayingEventExceptionTimeout || 4000;
this.streamPlayingEventExceptionTimeout = setTimeout(() => { this.streamPlayingEventExceptionTimeout = setTimeout(() => {
const msg =
    'StreamManager of Stream ' +
    this.stream.streamId +
    ' (' +
    (this.remote ? 'Subscriber' : 'Publisher') +
    ') did not trigger "streamPlaying" event in ' +
    msTimeout +
    ' ms';
logger.warn(msg); logger.warn(msg);
this.stream.session.emitEvent('exception', [
    new ExceptionEvent(this.stream.session, ExceptionEventName.NO_STREAM_PLAYING_EVENT, (<any>this) as Subscriber, msg)
]);
delete this.streamPlayingEventExceptionTimeout; delete this.streamPlayingEventExceptionTimeout;
}, msTimeout); }, msTimeout);
} }
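The timeout used above defaults to 4000 ms and is configurable through `noStreamPlayingEventExceptionTimeout`; the resulting exception is dispatched on the Session. A sketch assuming `OV` is an `OpenVidu` instance and `session` a connected Session:

```typescript
import { ExceptionEventName } from 'openvidu-browser';

// Give slow devices more time before NO_STREAM_PLAYING_EVENT is raised
OV.setAdvancedConfiguration({ noStreamPlayingEventExceptionTimeout: 8000 });

session.on('exception', (event) => {
    if (event.name === ExceptionEventName.NO_STREAM_PLAYING_EVENT) {
        console.warn(event.message);
    }
});
```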
@ -580,5 +618,4 @@ export abstract class StreamManager extends EventDispatcher {
clearTimeout(this.streamPlayingEventExceptionTimeout as any); clearTimeout(this.streamPlayingEventExceptionTimeout as any);
delete this.streamPlayingEventExceptionTimeout; delete this.streamPlayingEventExceptionTimeout;
} }
} }

View File

@ -27,11 +27,10 @@ const logger: OpenViduLogger = OpenViduLogger.getInstance();
/** /**
* Packs remote media streams. Participants automatically receive them when others publish their streams. Initialized with [[Session.subscribe]] method * Packs remote media streams. Participants automatically receive them when others publish their streams. Initialized with [[Session.subscribe]] method
* *
* See available event listeners at [[StreamManagerEventMap]]. * See available event listeners at [[StreamManagerEventMap]].
*/ */
export class Subscriber extends StreamManager { export class Subscriber extends StreamManager {
/** /**
* @hidden * @hidden
*/ */
@ -52,9 +51,12 @@ export class Subscriber extends StreamManager {
* @param value `true` to subscribe to the audio stream, `false` to unsubscribe from it * @param value `true` to subscribe to the audio stream, `false` to unsubscribe from it
*/ */
subscribeToAudio(value: boolean): Subscriber { subscribeToAudio(value: boolean): Subscriber {
this.stream
    .getMediaStream()
    .getAudioTracks()
    .forEach((track) => {
        track.enabled = value;
    });
this.stream.audioActive = value; this.stream.audioActive = value;
logger.info("'Subscriber' has " + (value ? 'subscribed to' : 'unsubscribed from') + ' its audio stream'); logger.info("'Subscriber' has " + (value ? 'subscribed to' : 'unsubscribed from') + ' its audio stream');
return this; return this;
@ -65,9 +67,12 @@ export class Subscriber extends StreamManager {
* @param value `true` to subscribe to the video stream, `false` to unsubscribe from it * @param value `true` to subscribe to the video stream, `false` to unsubscribe from it
*/ */
subscribeToVideo(value: boolean): Subscriber { subscribeToVideo(value: boolean): Subscriber {
this.stream
    .getMediaStream()
    .getVideoTracks()
    .forEach((track) => {
        track.enabled = value;
    });
this.stream.videoActive = value; this.stream.videoActive = value;
logger.info("'Subscriber' has " + (value ? 'subscribed to' : 'unsubscribed from') + ' its video stream'); logger.info("'Subscriber' has " + (value ? 'subscribed to' : 'unsubscribed from') + ' its video stream');
return this; return this;
@ -93,5 +98,4 @@ export class Subscriber extends StreamManager {
removedTrack.stop(); removedTrack.stop();
mediaStream.addTrack(track); mediaStream.addTrack(track);
} }
}
}
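Both Subscriber methods above simply toggle `enabled` on the remote tracks, so the media keeps arriving but is no longer played locally, and both return the Subscriber so calls can be chained. A usage sketch, assuming `subscriber` was returned by `Session.subscribe()`:

```typescript
// Locally mute the remote audio and stop rendering the remote video
subscriber.subscribeToAudio(false).subscribeToVideo(false);

// Later, resume both
subscriber.subscribeToAudio(true).subscribeToVideo(true);
```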

View File

@ -20,4 +20,4 @@ export enum LocalRecorderState {
RECORDING = 'RECORDING', RECORDING = 'RECORDING',
PAUSED = 'PAUSED', PAUSED = 'PAUSED',
FINISHED = 'FINISHED' FINISHED = 'FINISHED'
} }

View File

@ -19,7 +19,6 @@
* Defines property [[OpenViduError.name]] * Defines property [[OpenViduError.name]]
*/ */
export enum OpenViduErrorName { export enum OpenViduErrorName {
/** /**
* Browser is not supported by OpenVidu. * Browser is not supported by OpenVidu.
* Returned upon unsuccessful [[Session.connect]] * Returned upon unsuccessful [[Session.connect]]
@ -38,7 +37,7 @@ export enum OpenViduErrorName {
* error occurred at the OS, browser or web page level, which prevented access to the device. * error occurred at the OS, browser or web page level, which prevented access to the device.
* Returned upon unsuccessful [[OpenVidu.initPublisher]] or [[OpenVidu.getUserMedia]] * Returned upon unsuccessful [[OpenVidu.initPublisher]] or [[OpenVidu.getUserMedia]]
*/ */
DEVICE_ALREADY_IN_USE = "DEVICE_ALREADY_IN_USE", DEVICE_ALREADY_IN_USE = 'DEVICE_ALREADY_IN_USE',
/** /**
* The user hasn't granted permissions to capture some desktop screen when the browser asked for them. * The user hasn't granted permissions to capture some desktop screen when the browser asked for them.
@ -122,7 +121,6 @@ export enum OpenViduErrorName {
* Simple object to identify runtime errors on the client side * Simple object to identify runtime errors on the client side
*/ */
export class OpenViduError { export class OpenViduError {
/** /**
* Uniquely identifying name of the error * Uniquely identifying name of the error
*/ */
@ -140,5 +138,4 @@ export class OpenViduError {
this.name = name; this.name = name;
this.message = message; this.message = message;
} }
}
}

View File

@ -20,4 +20,4 @@ export enum TypeOfVideo {
SCREEN = 'SCREEN', SCREEN = 'SCREEN',
CUSTOM = 'CUSTOM', CUSTOM = 'CUSTOM',
IPCAM = 'IPCAM' IPCAM = 'IPCAM'
} }

View File

@ -19,7 +19,6 @@
* How the video will be inserted in the DOM for Publishers and Subscribers. See [[PublisherProperties.insertMode]] and [[SubscriberProperties.insertMode]] * How the video will be inserted in the DOM for Publishers and Subscribers. See [[PublisherProperties.insertMode]] and [[SubscriberProperties.insertMode]]
*/ */
export enum VideoInsertMode { export enum VideoInsertMode {
/** /**
* Video inserted after the target element (as next sibling) * Video inserted after the target element (as next sibling)
*/ */
@ -40,5 +39,4 @@ export enum VideoInsertMode {
* Video replaces target element * Video replaces target element
*/ */
REPLACE = 'REPLACE' REPLACE = 'REPLACE'
}
}
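The insert mode is chosen through `PublisherProperties.insertMode` or `SubscriberProperties.insertMode`, either as the enum or its string value. A minimal sketch assuming an element with id `video-container` exists in the DOM:

```typescript
import { OpenVidu, VideoInsertMode } from 'openvidu-browser';

const OV = new OpenVidu();
const publisher = OV.initPublisher('video-container', {
    insertMode: VideoInsertMode.APPEND // equivalent to the string 'APPEND'
});
```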

View File

@ -19,14 +19,12 @@ import { Event } from './Event';
import { Connection } from '../../OpenVidu/Connection'; import { Connection } from '../../OpenVidu/Connection';
import { Session } from '../../OpenVidu/Session'; import { Session } from '../../OpenVidu/Session';
/** /**
* Triggered by: * Triggered by:
* - [[connectionCreated]] * - [[connectionCreated]]
* - [[connectionDestroyed]] * - [[connectionDestroyed]]
*/ */
export class ConnectionEvent extends Event { export class ConnectionEvent extends Event {
/** /**
* Connection object that was created or destroyed * Connection object that was created or destroyed
*/ */
@ -58,6 +56,5 @@ export class ConnectionEvent extends Event {
* @hidden * @hidden
*/ */
// tslint:disable-next-line:no-empty // tslint:disable-next-line:no-empty
callDefaultBehavior() { } callDefaultBehavior() {}
}
}

View File

@ -25,7 +25,6 @@ import { Event } from './Event';
* Triggered by [[connectionPropertyChanged]] * Triggered by [[connectionPropertyChanged]]
*/ */
export class ConnectionPropertyChangedEvent extends Event { export class ConnectionPropertyChangedEvent extends Event {
/** /**
* The Connection whose property has changed * The Connection whose property has changed
*/ */
@ -61,6 +60,5 @@ export class ConnectionPropertyChangedEvent extends Event {
* @hidden * @hidden
*/ */
// tslint:disable-next-line:no-empty // tslint:disable-next-line:no-empty
callDefaultBehavior() { } callDefaultBehavior() {}
} }

View File

@ -20,7 +20,6 @@ import { StreamManager } from '../../OpenVidu/StreamManager';
import { Session } from '../../OpenVidu/Session'; import { Session } from '../../OpenVidu/Session';
export abstract class Event { export abstract class Event {
/** /**
* Whether the event has a default behavior that may be prevented by calling [[Event.preventDefault]] * Whether the event has a default behavior that may be prevented by calling [[Event.preventDefault]]
*/ */
@ -73,7 +72,7 @@ export abstract class Event {
*/ */
preventDefault() { preventDefault() {
// tslint:disable-next-line:no-empty // tslint:disable-next-line:no-empty
this.callDefaultBehavior = () => { }; this.callDefaultBehavior = () => {};
this.hasBeenPrevented = true; this.hasBeenPrevented = true;
} }
@ -81,5 +80,4 @@ export abstract class Event {
* @hidden * @hidden
*/ */
abstract callDefaultBehavior(); abstract callDefaultBehavior();
}
}

View File

@ -18,4 +18,4 @@
/** /**
* All OpenVidu Browser events inherit from this interface * All OpenVidu Browser events inherit from this interface
*/ */
export interface EventMap { } export interface EventMap {}

View File

@ -21,19 +21,18 @@ import { StreamManagerEventMap } from './StreamManagerEventMap';
/** /**
* Events dispatched by [[Publisher]] object. Manage event listeners with * Events dispatched by [[Publisher]] object. Manage event listeners with
* [[Publisher.on]], [[Publisher.once]] and [[Publisher.off]] methods. * [[Publisher.on]], [[Publisher.once]] and [[Publisher.off]] methods.
* *
* Example: * Example:
* *
* ```javascript * ```javascript
* publisher.on('accessDenied', () => { * publisher.on('accessDenied', () => {
* console.error('Camera access has been denied!'); * console.error('Camera access has been denied!');
* } * }
* *
* publisher.off('accessDenied'); * publisher.off('accessDenied');
* ``` * ```
*/ */
export interface PublisherEventMap extends StreamManagerEventMap { export interface PublisherEventMap extends StreamManagerEventMap {
/** /**
* Event dispatched when the [[Publisher]] has been published to the session (see [[Session.publish]]). * Event dispatched when the [[Publisher]] has been published to the session (see [[Session.publish]]).
*/ */
@ -46,7 +45,7 @@ export interface PublisherEventMap extends StreamManagerEventMap {
/** /**
* Event dispatched when a Publisher tries to access some media input device and has the required permissions to do so. * Event dispatched when a Publisher tries to access some media input device and has the required permissions to do so.
* *
* This happens when calling [[OpenVidu.initPublisher]] or [[OpenVidu.initPublisherAsync]] and the application * This happens when calling [[OpenVidu.initPublisher]] or [[OpenVidu.initPublisherAsync]] and the application
* has permissions to use the devices. This usually means the user has accepted the permissions dialog that the * has permissions to use the devices. This usually means the user has accepted the permissions dialog that the
* browser will show when trying to access the camera/microphone/screen. * browser will show when trying to access the camera/microphone/screen.
@ -55,7 +54,7 @@ export interface PublisherEventMap extends StreamManagerEventMap {
/** /**
* Event dispatched when a Publisher tries to access some media input device and does NOT have the required permissions to do so. * Event dispatched when a Publisher tries to access some media input device and does NOT have the required permissions to do so.
* *
* This happens when calling [[OpenVidu.initPublisher]] or [[OpenVidu.initPublisherAsync]] and the application * This happens when calling [[OpenVidu.initPublisher]] or [[OpenVidu.initPublisherAsync]] and the application
* lacks the required permissions to use the devices. This usually means the user has NOT accepted the permissions dialog that the * lacks the required permissions to use the devices. This usually means the user has NOT accepted the permissions dialog that the
* browser will show when trying to access the camera/microphone/screen. * browser will show when trying to access the camera/microphone/screen.
@ -64,7 +63,7 @@ export interface PublisherEventMap extends StreamManagerEventMap {
/** /**
* Event dispatched when the pop-up shown by the browser to request permissions for the input media devices is opened. * Event dispatched when the pop-up shown by the browser to request permissions for the input media devices is opened.
* *
* You can use this event to alert the user about granting permissions for your website. Note that this event is artificially * You can use this event to alert the user about granting permissions for your website. Note that this event is artificially
* generated based only on time intervals when accessing media devices. A heavily overloaded client device that simply takes more * generated based only on time intervals when accessing media devices. A heavily overloaded client device that simply takes more
* than usual to access the media device could produce a false trigger of this event. * than usual to access the media device could produce a false trigger of this event.
@ -74,8 +73,8 @@ export interface PublisherEventMap extends StreamManagerEventMap {
/** /**
* Event dispatched after the user clicks on "Allow" or "Block" in the pop-up shown by the browser to request permissions * Event dispatched after the user clicks on "Allow" or "Block" in the pop-up shown by the browser to request permissions
* for the input media devices. * for the input media devices.
* *
* This event can only be triggered after an [[accessDialogOpened]] event has been previously triggered. * This event can only be triggered after an [[accessDialogOpened]] event has been previously triggered.
*/ */
accessDialogClosed: never; accessDialogClosed: never;
} }
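These four access events are typically wired right after creating the Publisher and before publishing it. A sketch assuming `publisher` comes from `OpenVidu.initPublisher()`, `session` is a connected Session, and the two UI helpers are hypothetical:

```typescript
publisher.on('accessDialogOpened', () => showPermissionsHint()); // hypothetical UI helper
publisher.on('accessDialogClosed', () => hidePermissionsHint()); // hypothetical UI helper
publisher.on('accessAllowed', () => session.publish(publisher));
publisher.on('accessDenied', () => console.error('Camera/microphone access was denied'));
```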

View File

@ -30,19 +30,18 @@ import { StreamPropertyChangedEvent } from '../StreamPropertyChangedEvent';
/** /**
* Events dispatched by [[Session]] object. Manage event listeners with * Events dispatched by [[Session]] object. Manage event listeners with
* [[Session.on]], [[Session.once]] and [[Session.off]] methods. * [[Session.on]], [[Session.once]] and [[Session.off]] methods.
* *
* Example: * Example:
* *
* ```javascript * ```javascript
* session.on('connectionCreated', (event) => { * session.on('connectionCreated', (event) => {
* console.log('Connection ' + event.connection.connectionId + ' created'); * console.log('Connection ' + event.connection.connectionId + ' created');
* } * }
* *
* session.off('connectionDestroyed'); * session.off('connectionDestroyed');
* ``` * ```
*/ */
export interface SessionEventMap extends EventMap { export interface SessionEventMap extends EventMap {
/** /**
* Event dispatched when a new user has connected to the session. * Event dispatched when a new user has connected to the session.
* *

View File

@ -22,21 +22,20 @@ import { StreamPropertyChangedEvent } from '../StreamPropertyChangedEvent';
import { VideoElementEvent } from '../VideoElementEvent'; import { VideoElementEvent } from '../VideoElementEvent';
/** /**
* Events dispatched by [[StreamManager]] object. Manage event listeners with * Events dispatched by [[StreamManager]] object. Manage event listeners with
* [[StreamManager.on]], [[StreamManager.once]] and [[StreamManager.off]] methods. * [[StreamManager.on]], [[StreamManager.once]] and [[StreamManager.off]] methods.
* *
* Example: * Example:
* *
* ```javascript * ```javascript
* streamManager.on('videoElementCreated', (event) => { * streamManager.on('videoElementCreated', (event) => {
* console.log('New video element created:', event.element); * console.log('New video element created:', event.element);
* } * }
* *
* streamManager.off('videoElementCreated'); * streamManager.off('videoElementCreated');
* ``` * ```
*/ */
export interface StreamManagerEventMap extends EventMap { export interface StreamManagerEventMap extends EventMap {
/** /**
* Event dispatched when a new HTML video element has been inserted into DOM by OpenVidu Browser library. See * Event dispatched when a new HTML video element has been inserted into DOM by OpenVidu Browser library. See
* [Manage video players](/en/stable/cheatsheet/manage-videos) section. * [Manage video players](/en/stable/cheatsheet/manage-videos) section.
@ -73,7 +72,7 @@ export interface StreamManagerEventMap extends EventMap {
/** /**
* Event dispatched when the user owning the stream has started speaking. * Event dispatched when the user owning the stream has started speaking.
* *
* Extra information: * Extra information:
* - This event will only be triggered for **streams that have audio tracks** ([[Stream.hasAudio]] must be true). * - This event will only be triggered for **streams that have audio tracks** ([[Stream.hasAudio]] must be true).
* - Further configuration can be applied on how the event is dispatched by setting property `publisherSpeakingEventsOptions` in the call of [[OpenVidu.setAdvancedConfiguration]]. * - Further configuration can be applied on how the event is dispatched by setting property `publisherSpeakingEventsOptions` in the call of [[OpenVidu.setAdvancedConfiguration]].
@ -82,10 +81,10 @@ export interface StreamManagerEventMap extends EventMap {
/** /**
* Event dispatched when the user owning the stream has stopped speaking. * Event dispatched when the user owning the stream has stopped speaking.
* *
* Extra information: * Extra information:
* - This event will only be triggered for **streams that have audio tracks** ([[Stream.hasAudio]] must be true). * - This event will only be triggered for **streams that have audio tracks** ([[Stream.hasAudio]] must be true).
* - Further configuration can be applied on how the event is dispatched by setting property `publisherSpeakingEventsOptions` in the call of [[OpenVidu.setAdvancedConfiguration]]. * - Further configuration can be applied on how the event is dispatched by setting property `publisherSpeakingEventsOptions` in the call of [[OpenVidu.setAdvancedConfiguration]].
*/ */
publisherStopSpeaking: PublisherSpeakingEvent; publisherStopSpeaking: PublisherSpeakingEvent;
} }
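Both speaking events carry a `PublisherSpeakingEvent` with the speaker's `connection` and `streamId`, and can be listened to either on a specific StreamManager (as below) or on the whole Session. Sketch assuming an existing `subscriber`:

```typescript
subscriber.on('publisherStartSpeaking', (event) => {
    console.log('Connection ' + event.connection.connectionId + ' started speaking');
});
subscriber.on('publisherStopSpeaking', (event) => {
    console.log('Connection ' + event.connection.connectionId + ' stopped speaking');
});
```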

View File

@ -20,15 +20,13 @@ import { Stream } from '../../OpenVidu/Stream';
import { Subscriber } from '../../OpenVidu/Subscriber'; import { Subscriber } from '../../OpenVidu/Subscriber';
import { Event } from './Event'; import { Event } from './Event';
/** /**
* Defines property [[ExceptionEvent.name]] * Defines property [[ExceptionEvent.name]]
*/ */
export enum ExceptionEventName { export enum ExceptionEventName {
/** /**
* There was an unexpected error on the server-side processing an ICE candidate generated and sent by the client-side. * There was an unexpected error on the server-side processing an ICE candidate generated and sent by the client-side.
* *
* [[ExceptionEvent]] objects with this [[ExceptionEvent.name]] will have as [[ExceptionEvent.origin]] property a [[Session]] object. * [[ExceptionEvent]] objects with this [[ExceptionEvent.name]] will have as [[ExceptionEvent.origin]] property a [[Session]] object.
*/ */
ICE_CANDIDATE_ERROR = 'ICE_CANDIDATE_ERROR', ICE_CANDIDATE_ERROR = 'ICE_CANDIDATE_ERROR',
@ -36,11 +34,11 @@ export enum ExceptionEventName {
/** /**
* The [ICE connection state](https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/iceConnectionState) * The [ICE connection state](https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/iceConnectionState)
* of an [RTCPeerConnection](https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection) reached `failed` status. * of an [RTCPeerConnection](https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection) reached `failed` status.
* *
* This is a terminal error that won't have any kind of possible recovery. If the client is still connected to OpenVidu Server, * This is a terminal error that won't have any kind of possible recovery. If the client is still connected to OpenVidu Server,
* then an automatic reconnection process of the media stream is immediately performed. If the ICE connection has broken due to * then an automatic reconnection process of the media stream is immediately performed. If the ICE connection has broken due to
* a total network drop, then no automatic reconnection process will be possible. * a total network drop, then no automatic reconnection process will be possible.
* *
* [[ExceptionEvent]] objects with this [[ExceptionEvent.name]] will have as [[ExceptionEvent.origin]] property a [[Stream]] object. * [[ExceptionEvent]] objects with this [[ExceptionEvent.name]] will have as [[ExceptionEvent.origin]] property a [[Stream]] object.
*/ */
ICE_CONNECTION_FAILED = 'ICE_CONNECTION_FAILED', ICE_CONNECTION_FAILED = 'ICE_CONNECTION_FAILED',
@ -48,15 +46,15 @@ export enum ExceptionEventName {
/** /**
* The [ICE connection state](https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/iceConnectionState) * The [ICE connection state](https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/iceConnectionState)
* of an [RTCPeerConnection](https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection) reached `disconnected` status. * of an [RTCPeerConnection](https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection) reached `disconnected` status.
* *
* This is not a terminal error, and it is possible for the ICE connection to be reconnected. If the client is still connected to * This is not a terminal error, and it is possible for the ICE connection to be reconnected. If the client is still connected to
* OpenVidu Server and after certain timeout the ICE connection has not reached a success or terminal status, then an automatic * OpenVidu Server and after certain timeout the ICE connection has not reached a success or terminal status, then an automatic
* reconnection process of the media stream is performed. If the ICE connection has broken due to a total network drop, then no * reconnection process of the media stream is performed. If the ICE connection has broken due to a total network drop, then no
* automatic reconnection process will be possible. * automatic reconnection process will be possible.
* *
* You can customize the timeout for the reconnection attempt with property [[OpenViduAdvancedConfiguration.iceConnectionDisconnectedExceptionTimeout]], * You can customize the timeout for the reconnection attempt with property [[OpenViduAdvancedConfiguration.iceConnectionDisconnectedExceptionTimeout]],
* which by default is 4000 milliseconds. * which by default is 4000 milliseconds.
* *
* [[ExceptionEvent]] objects with this [[ExceptionEvent.name]] will have as [[ExceptionEvent.origin]] property a [[Stream]] object. * [[ExceptionEvent]] objects with this [[ExceptionEvent.name]] will have as [[ExceptionEvent.origin]] property a [[Stream]] object.
*/ */
ICE_CONNECTION_DISCONNECTED = 'ICE_CONNECTION_DISCONNECTED', ICE_CONNECTION_DISCONNECTED = 'ICE_CONNECTION_DISCONNECTED',
@ -64,20 +62,20 @@ export enum ExceptionEventName {
/** /**
* A [[Subscriber]] object has not fired event `streamPlaying` after certain timeout. `streamPlaying` event belongs to [[StreamManagerEvent]] * A [[Subscriber]] object has not fired event `streamPlaying` after certain timeout. `streamPlaying` event belongs to [[StreamManagerEvent]]
* category. It wraps Web API native event [canplay](https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/canplay_event). * category. It wraps Web API native event [canplay](https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/canplay_event).
* *
* OpenVidu Browser can take care of the video players (see [here](/en/stable/cheatsheet/manage-videos/#let-openvidu-take-care-of-the-video-players)), * OpenVidu Browser can take care of the video players (see [here](/en/stable/cheatsheet/manage-videos/#let-openvidu-take-care-of-the-video-players)),
* or you can take care of video players on your own (see [here](/en/stable/cheatsheet/manage-videos/#you-take-care-of-the-video-players)). * or you can take care of video players on your own (see [here](/en/stable/cheatsheet/manage-videos/#you-take-care-of-the-video-players)).
* Either way, whenever a [[Subscriber]] object is commanded to attach its [[Stream]] to a video element, it is supposed to fire `streamPlaying` * Either way, whenever a [[Subscriber]] object is commanded to attach its [[Stream]] to a video element, it is supposed to fire `streamPlaying`
* event shortly after. If it does not, then we can safely assume that something wrong has happened while playing the remote video and the * event shortly after. If it does not, then we can safely assume that something wrong has happened while playing the remote video and the
* application may be notified through this specific ExceptionEvent. * application may be notified through this specific ExceptionEvent.
* *
* The timeout can be configured with property [[OpenViduAdvancedConfiguration.noStreamPlayingEventExceptionTimeout]]. By default it is 4000 milliseconds. * The timeout can be configured with property [[OpenViduAdvancedConfiguration.noStreamPlayingEventExceptionTimeout]]. By default it is 4000 milliseconds.
* *
* This is just an informative exception. It only means that a remote Stream that is supposed to be playing by a video player has not done so * This is just an informative exception. It only means that a remote Stream that is supposed to be playing by a video player has not done so
* in a reasonable time. But the lack of the event can be caused by multiple reasons. If a Subscriber is not playing its Stream, the origin * in a reasonable time. But the lack of the event can be caused by multiple reasons. If a Subscriber is not playing its Stream, the origin
* of the problem could be located at the Publisher side. Or it may be caused by a transient network problem. But it also could be a problem with * of the problem could be located at the Publisher side. Or it may be caused by a transient network problem. But it also could be a problem with
* autoplay permissions. Bottom line, the cause can be very varied, and depending on the application the lack of the event could even be expected. * autoplay permissions. Bottom line, the cause can be very varied, and depending on the application the lack of the event could even be expected.
* *
* [[ExceptionEvent]] objects with this [[ExceptionEvent.name]] will have as [[ExceptionEvent.origin]] property a [[Subscriber]] object. * [[ExceptionEvent]] objects with this [[ExceptionEvent.name]] will have as [[ExceptionEvent.origin]] property a [[Subscriber]] object.
*/ */
NO_STREAM_PLAYING_EVENT = 'NO_STREAM_PLAYING_EVENT' NO_STREAM_PLAYING_EVENT = 'NO_STREAM_PLAYING_EVENT'
@ -87,7 +85,6 @@ export enum ExceptionEventName {
* Triggered by [[SessionEventMap.exception]] * Triggered by [[SessionEventMap.exception]]
*/ */
export class ExceptionEvent extends Event { export class ExceptionEvent extends Event {
/** /**
* Name of the exception * Name of the exception
*/ */
@ -126,6 +123,5 @@ export class ExceptionEvent extends Event {
* @hidden * @hidden
*/ */
// tslint:disable-next-line:no-empty // tslint:disable-next-line:no-empty
callDefaultBehavior() { } callDefaultBehavior() {}
}
}
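Every name in the enum above surfaces through the Session's `exception` event, with `ExceptionEvent.origin` pointing at the affected Session, Stream or Subscriber. A handler sketch assuming a connected `session`:

```typescript
import { ExceptionEventName } from 'openvidu-browser';

session.on('exception', (event) => {
    switch (event.name) {
        case ExceptionEventName.ICE_CONNECTION_DISCONNECTED:
            console.warn('ICE disconnected; an automatic media reconnection may follow', event.origin);
            break;
        case ExceptionEventName.ICE_CONNECTION_FAILED:
            console.error('ICE connection failed for', event.origin);
            break;
        default:
            console.warn('OpenVidu exception:', event.name, event.message);
    }
});
```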

View File

@ -18,12 +18,10 @@
import { Event } from './Event'; import { Event } from './Event';
import { Filter } from '../../OpenVidu/Filter'; import { Filter } from '../../OpenVidu/Filter';
/** /**
* Defines every event dispatched by audio/video stream filters. You can subscribe to filter events by calling [[Filter.addEventListener]] * Defines every event dispatched by audio/video stream filters. You can subscribe to filter events by calling [[Filter.addEventListener]]
*/ */
export class FilterEvent extends Event { export class FilterEvent extends Event {
/** /**
* Data of the event * Data of the event
*/ */
@ -41,6 +39,5 @@ export class FilterEvent extends Event {
* @hidden * @hidden
*/ */
// tslint:disable-next-line:no-empty // tslint:disable-next-line:no-empty
callDefaultBehavior() { } callDefaultBehavior() {}
}
}

View File

@ -23,7 +23,6 @@ import { Connection } from '../../OpenVidu/Connection';
* Triggered by [[networkQualityLevelChanged]] * Triggered by [[networkQualityLevelChanged]]
*/ */
export class NetworkQualityLevelChangedEvent extends Event { export class NetworkQualityLevelChangedEvent extends Event {
/** /**
* New value of the network quality level * New value of the network quality level
*/ */
@ -37,7 +36,7 @@ export class NetworkQualityLevelChangedEvent extends Event {
/** /**
* Connection for whom the network quality level changed * Connection for whom the network quality level changed
*/ */
connection: Connection connection: Connection;
/** /**
* @hidden * @hidden
@ -53,6 +52,5 @@ export class NetworkQualityLevelChangedEvent extends Event {
* @hidden * @hidden
*/ */
// tslint:disable-next-line:no-empty // tslint:disable-next-line:no-empty
callDefaultBehavior() { } callDefaultBehavior() {}
} }

View File

@ -20,14 +20,12 @@ import { Connection } from '../../OpenVidu/Connection';
import { Session } from '../../OpenVidu/Session'; import { Session } from '../../OpenVidu/Session';
import { StreamManager } from '../../OpenVidu/StreamManager'; import { StreamManager } from '../../OpenVidu/StreamManager';
/** /**
* Triggered by: * Triggered by:
* - `publisherStartSpeaking` (available for [Session](/en/stable/api/openvidu-browser/interfaces/SessionEventMap.html#publisherStartSpeaking) and [StreamManager](/en/stable/api/openvidu-browser/interfaces/StreamManagerEventMap.html#publisherStartSpeaking) objects) * - `publisherStartSpeaking` (available for [Session](/en/stable/api/openvidu-browser/interfaces/SessionEventMap.html#publisherStartSpeaking) and [StreamManager](/en/stable/api/openvidu-browser/interfaces/StreamManagerEventMap.html#publisherStartSpeaking) objects)
* - `publisherStopSpeaking` (available for [Session](/en/stable/api/openvidu-browser/interfaces/SessionEventMap.html#publisherStopSpeaking) and [StreamManager](/en/stable/api/openvidu-browser/interfaces/StreamManagerEventMap.html#publisherStopSpeaking) objects) * - `publisherStopSpeaking` (available for [Session](/en/stable/api/openvidu-browser/interfaces/SessionEventMap.html#publisherStopSpeaking) and [StreamManager](/en/stable/api/openvidu-browser/interfaces/StreamManagerEventMap.html#publisherStopSpeaking) objects)
*/ */
export class PublisherSpeakingEvent extends Event { export class PublisherSpeakingEvent extends Event {
/** /**
* The client that started or stopped speaking * The client that started or stopped speaking
*/ */
@ -52,6 +50,5 @@ export class PublisherSpeakingEvent extends Event {
* @hidden * @hidden
*/ */
// tslint:disable-next-line:no-empty // tslint:disable-next-line:no-empty
callDefaultBehavior() { } callDefaultBehavior() {}
}
}

View File

@ -18,14 +18,12 @@
import { Event } from './Event'; import { Event } from './Event';
import { Session } from '../../OpenVidu/Session'; import { Session } from '../../OpenVidu/Session';
/** /**
* Triggered by: * Triggered by:
* - [[recordingStarted]] * - [[recordingStarted]]
* - [[recordingStopped]] * - [[recordingStopped]]
*/ */
export class RecordingEvent extends Event { export class RecordingEvent extends Event {
/** /**
* The recording ID generated in openvidu-server * The recording ID generated in openvidu-server
*/ */
@ -68,6 +66,5 @@ export class RecordingEvent extends Event {
* @hidden * @hidden
*/ */
// tslint:disable-next-line:no-empty // tslint:disable-next-line:no-empty
callDefaultBehavior() { } callDefaultBehavior() {}
}
}

View File

@ -24,12 +24,10 @@ import { OpenViduLogger } from '../Logger/OpenViduLogger';
*/ */
const logger: OpenViduLogger = OpenViduLogger.getInstance(); const logger: OpenViduLogger = OpenViduLogger.getInstance();
/** /**
* Triggered by [[sessionDisconnected]] * Triggered by [[sessionDisconnected]]
*/ */
export class SessionDisconnectedEvent extends Event { export class SessionDisconnectedEvent extends Event {
/** /**
* - "disconnect": you have called `Session.disconnect()` * - "disconnect": you have called `Session.disconnect()`
* - "forceDisconnectByUser": you have been evicted from the Session by other user calling `Session.forceDisconnect()` * - "forceDisconnectByUser": you have been evicted from the Session by other user calling `Session.forceDisconnect()`
@ -39,7 +37,7 @@ export class SessionDisconnectedEvent extends Event {
* Session object will always have previously dispatched a `reconnecting` event. If the reconnection process succeeds, * Session object will always have previously dispatched a `reconnecting` event. If the reconnection process succeeds,
* Session object will dispatch a `reconnected` event. If it fails, Session object will dispatch a SessionDisconnectedEvent * Session object will dispatch a `reconnected` event. If it fails, Session object will dispatch a SessionDisconnectedEvent
* with reason "networkDisconnect" * with reason "networkDisconnect"
* - "nodeCrashed": a node has crashed in the server side. You can use this reason to ask your application's backend to reconnect * - "nodeCrashed": a node has crashed in the server side. You can use this reason to ask your application's backend to reconnect
* to a new session to replace the crashed one * to a new session to replace the crashed one
*/ */
reason: string; reason: string;
@ -56,13 +54,12 @@ export class SessionDisconnectedEvent extends Event {
* @hidden * @hidden
*/ */
callDefaultBehavior() { callDefaultBehavior() {
logger.info("Calling default behavior upon '" + this.type + "' event dispatched by 'Session'"); logger.info("Calling default behavior upon '" + this.type + "' event dispatched by 'Session'");
const session = <Session>this.target; const session = <Session>this.target;
// Dispose and delete all remote Connections // Dispose and delete all remote Connections
session.remoteConnections.forEach(remoteConnection => { session.remoteConnections.forEach((remoteConnection) => {
const connectionId = remoteConnection.connectionId; const connectionId = remoteConnection.connectionId;
if (!!session.remoteConnections.get(connectionId)?.stream) { if (!!session.remoteConnections.get(connectionId)?.stream) {
session.remoteConnections.get(connectionId)?.stream!.disposeWebRtcPeer(); session.remoteConnections.get(connectionId)?.stream!.disposeWebRtcPeer();
@ -79,5 +76,4 @@ export class SessionDisconnectedEvent extends Event {
session.remoteConnections.delete(connectionId); session.remoteConnections.delete(connectionId);
}); });
} }
}
}
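`reason` is the key field when reacting to a disconnection: a "networkDisconnect" after a failed reconnection usually deserves different handling than a voluntary "disconnect". Sketch assuming a connected `session`:

```typescript
session.on('sessionDisconnected', (event) => {
    if (event.reason === 'networkDisconnect') {
        console.warn('Connection to OpenVidu Server was lost');
    } else if (event.reason === 'forceDisconnectByUser') {
        console.warn('You were evicted from the session by another user');
    }
    // The default behavior shown above has already disposed every remote Connection and Stream
});
```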

View File

@ -19,14 +19,12 @@ import { Event } from './Event';
import { Connection } from '../../OpenVidu/Connection'; import { Connection } from '../../OpenVidu/Connection';
import { Session } from '../../OpenVidu/Session'; import { Session } from '../../OpenVidu/Session';
/** /**
* Triggered by [[SessionEventMap.signal]] * Triggered by [[SessionEventMap.signal]]
*/ */
export class SignalEvent extends Event { export class SignalEvent extends Event {
/** /**
* The type of signal. It is string `"signal"` for those signals sent with no [[SignalOptions.type]] property, and `"signal:type"` if was sent with a * The type of signal. It is string `"signal"` for those signals sent with no [[SignalOptions.type]] property, and `"signal:type"` if was sent with a
* valid [[SignalOptions.type]] property. * valid [[SignalOptions.type]] property.
* *
* The client must be specifically subscribed to `Session.on('signal:type', function(signalEvent) {...})` to trigger that type of signal. * The client must be specifically subscribed to `Session.on('signal:type', function(signalEvent) {...})` to trigger that type of signal.
@ -62,6 +60,5 @@ export class SignalEvent extends Event {
* @hidden * @hidden
*/ */
// tslint:disable-next-line:no-empty // tslint:disable-next-line:no-empty
callDefaultBehavior() { } callDefaultBehavior() {}
}
}

View File

@ -32,7 +32,6 @@ const logger: OpenViduLogger = OpenViduLogger.getInstance();
* - `streamDestroyed` (available for [Session](/en/stable/api/openvidu-browser/interfaces/SessionEventMap.html#streamDestroyed) and [Publisher](/en/stable/api/openvidu-browser/interfaces/PublisherEventMap.html#streamDestroyed) objects) * - `streamDestroyed` (available for [Session](/en/stable/api/openvidu-browser/interfaces/SessionEventMap.html#streamDestroyed) and [Publisher](/en/stable/api/openvidu-browser/interfaces/PublisherEventMap.html#streamDestroyed) objects)
*/ */
export class StreamEvent extends Event { export class StreamEvent extends Event {
/** /**
* Stream object that was created or destroyed * Stream object that was created or destroyed
*/ */
@ -68,7 +67,6 @@ export class StreamEvent extends Event {
*/ */
callDefaultBehavior() { callDefaultBehavior() {
if (this.type === 'streamDestroyed') { if (this.type === 'streamDestroyed') {
if (this.target instanceof Session) { if (this.target instanceof Session) {
// Remote Stream // Remote Stream
logger.info("Calling default behavior upon '" + this.type + "' event dispatched by 'Session'"); logger.info("Calling default behavior upon '" + this.type + "' event dispatched by 'Session'");
@ -82,7 +80,7 @@ export class StreamEvent extends Event {
// Delete Publisher object from OpenVidu publishers array // Delete Publisher object from OpenVidu publishers array
const openviduPublishers = (<Publisher>this.target).openvidu.publishers; const openviduPublishers = (<Publisher>this.target).openvidu.publishers;
for (let i = 0; i < openviduPublishers.length; i++) { for (let i = 0; i < openviduPublishers.length; i++) {
if (openviduPublishers[i] === (<Publisher>this.target)) { if (openviduPublishers[i] === <Publisher>this.target) {
openviduPublishers.splice(i, 1); openviduPublishers.splice(i, 1);
break; break;
} }
@ -109,8 +107,6 @@ export class StreamEvent extends Event {
} }
} }
} }
} }
} }
}
}
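Because the default behavior above disposes the WebRTC peer and deletes the associated video elements, an application that manages its own video elements can call `preventDefault()` and clean up manually. Sketch assuming a connected `session`:

```typescript
session.on('streamDestroyed', (event) => {
    // Keep the Subscriber's video element in the DOM instead of letting OpenVidu remove it
    event.preventDefault();
    console.log('Stream ' + event.stream.streamId + ' destroyed. Reason: ' + event.reason);
});
```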

View File

@ -24,12 +24,11 @@ import { StreamManager } from '../../OpenVidu/StreamManager';
* - [[streamAudioVolumeChange]] * - [[streamAudioVolumeChange]]
*/ */
export class StreamManagerEvent extends Event { export class StreamManagerEvent extends Event {
/** /**
* For `streamAudioVolumeChange` event: * For `streamAudioVolumeChange` event:
* - `{newValue: number, oldValue: number}`: new and old audio volume values. These values are between -100 (silence) and 0 (loudest possible volume). * - `{newValue: number, oldValue: number}`: new and old audio volume values. These values are between -100 (silence) and 0 (loudest possible volume).
* They are not exact and depend on how the browser is managing the audio track, but -100 and 0 can be taken as limit values. * They are not exact and depend on how the browser is managing the audio track, but -100 and 0 can be taken as limit values.
* *
* For `streamPlaying` event undefined * For `streamPlaying` event undefined
*/ */
value: Object | undefined; value: Object | undefined;
@ -46,6 +45,5 @@ export class StreamManagerEvent extends Event {
* @hidden * @hidden
*/ */
// tslint:disable-next-line:no-empty // tslint:disable-next-line:no-empty
callDefaultBehavior() { } callDefaultBehavior() {}
}
}
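For `streamAudioVolumeChange` the `value` object carries the old and new volume in dB, which makes a simple volume meter straightforward; the element id below is a placeholder. Sketch assuming an existing `streamManager` (Publisher or Subscriber) whose stream has audio:

```typescript
streamManager.on('streamAudioVolumeChange', (event) => {
    const { newValue } = event.value as { newValue: number; oldValue: number };
    // Map the [-100, 0] dB range to a [0, 100] % meter width
    const meter = document.getElementById('volume-meter');
    if (meter) {
        meter.style.width = 100 + newValue + '%';
    }
});
```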

View File

@ -24,7 +24,6 @@ import { StreamManager } from '../../OpenVidu/StreamManager';
* Triggered by `streamPropertyChanged` (available for [Session](/en/stable/api/openvidu-browser/interfaces/SessionEventMap.html#streamPropertyChanged) and [StreamManager](/en/stable/api/openvidu-browser/interfaces/StreamManagerEventMap.html#streamPropertyChanged) objects) * Triggered by `streamPropertyChanged` (available for [Session](/en/stable/api/openvidu-browser/interfaces/SessionEventMap.html#streamPropertyChanged) and [StreamManager](/en/stable/api/openvidu-browser/interfaces/StreamManagerEventMap.html#streamPropertyChanged) objects)
*/ */
export class StreamPropertyChangedEvent extends Event { export class StreamPropertyChangedEvent extends Event {
/** /**
* The Stream whose property has changed. You can always identify the user publishing the changed stream by consulting property [[Stream.connection]] * The Stream whose property has changed. You can always identify the user publishing the changed stream by consulting property [[Stream.connection]]
*/ */
@ -57,7 +56,14 @@ export class StreamPropertyChangedEvent extends Event {
/** /**
* @hidden * @hidden
*/ */
constructor(
    target: Session | StreamManager,
    stream: Stream,
    changedProperty: string,
    newValue: Object,
    oldValue: Object,
    reason: string
) {
super(false, target, 'streamPropertyChanged'); super(false, target, 'streamPropertyChanged');
this.stream = stream; this.stream = stream;
this.changedProperty = changedProperty; this.changedProperty = changedProperty;
@ -70,6 +76,5 @@ export class StreamPropertyChangedEvent extends Event {
* @hidden * @hidden
*/ */
// tslint:disable-next-line:no-empty // tslint:disable-next-line:no-empty
callDefaultBehavior() { } callDefaultBehavior() {}
}
}
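A typical listener inspects `changedProperty` so it only reacts to the changes it cares about, such as remote users muting their video. Sketch assuming a connected `session`:

```typescript
session.on('streamPropertyChanged', (event) => {
    if (event.changedProperty === 'videoActive') {
        const state = event.newValue ? 'enabled' : 'disabled';
        console.log('Stream ' + event.stream.streamId + ' video is now ' + state + ' (reason: ' + event.reason + ')');
    }
});
```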

View File

@ -18,14 +18,12 @@
import { Event } from './Event'; import { Event } from './Event';
import { StreamManager } from '../../OpenVidu/StreamManager'; import { StreamManager } from '../../OpenVidu/StreamManager';
/** /**
* Triggered by: * Triggered by:
* - [[videoElementCreated]] * - [[videoElementCreated]]
* - [[videoElementDestroyed]] * - [[videoElementDestroyed]]
*/ */
export class VideoElementEvent extends Event { export class VideoElementEvent extends Event {
/** /**
* Video element that was created or destroyed * Video element that was created or destroyed
*/ */
@ -43,6 +41,5 @@ export class VideoElementEvent extends Event {
* @hidden * @hidden
*/ */
// tslint:disable-next-line:no-empty // tslint:disable-next-line:no-empty
callDefaultBehavior() { } callDefaultBehavior() {}
}
}

View File

@ -19,4 +19,4 @@ export interface CustomMediaStreamConstraints {
constraints: MediaStreamConstraints; constraints: MediaStreamConstraints;
audioTrack: MediaStreamTrack | undefined; audioTrack: MediaStreamTrack | undefined;
videoTrack: MediaStreamTrack | undefined; videoTrack: MediaStreamTrack | undefined;
} }

View File

@ -18,4 +18,4 @@ export interface IceServerProperties {
url: string; url: string;
username?: string; username?: string;
credential?: string; credential?: string;
} }
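Custom STUN/TURN servers are passed to `OpenVidu.setAdvancedConfiguration()`; the URLs and credentials below are placeholders. Sketch assuming `OV` is an `OpenVidu` instance:

```typescript
OV.setAdvancedConfiguration({
    iceServers: [
        { urls: 'stun:stun.example.com:3478' },
        { urls: 'turn:turn.example.com:3478', username: 'user', credential: 'secret' }
    ]
});
```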

View File

@ -29,6 +29,6 @@ export interface InboundStreamOptions {
videoActive: boolean; videoActive: boolean;
typeOfVideo: TypeOfVideo; typeOfVideo: TypeOfVideo;
frameRate: number; frameRate: number;
videoDimensions: { width: number, height: number }; videoDimensions: { width: number; height: number };
filter?: Filter; filter?: Filter;
} }

View File

@ -20,4 +20,4 @@ import { PublisherProperties } from '../Public/PublisherProperties';
export interface OutboundStreamOptions { export interface OutboundStreamOptions {
publisherProperties: PublisherProperties; publisherProperties: PublisherProperties;
mediaConstraints: MediaStreamConstraints; mediaConstraints: MediaStreamConstraints;
} }
View File
@ -19,4 +19,4 @@ export interface SessionOptions {
sessionId: string; sessionId: string;
participantId: string; participantId: string;
metadata: string; metadata: string;
} }
View File
@ -21,4 +21,4 @@ export interface SignalOptions {
type?: string; type?: string;
to?: Connection[]; to?: Connection[];
data?: string; data?: string;
} }
View File
@ -29,4 +29,4 @@ export interface StreamOptionsServer {
frameRate: number; frameRate: number;
videoDimensions: string; videoDimensions: string;
filter: Filter; filter: Filter;
} }
View File
@ -19,7 +19,6 @@
* See [[Session.capabilities]] * See [[Session.capabilities]]
*/ */
export interface Capabilities { export interface Capabilities {
/** /**
* `true` if the client can call [[Session.forceDisconnect]], `false` if not * `true` if the client can call [[Session.forceDisconnect]], `false` if not
*/ */
@ -39,5 +38,4 @@ export interface Capabilities {
* `true` if the client can call [[Session.subscribe]], `false` if not (true for every user for now) * `true` if the client can call [[Session.subscribe]], `false` if not (true for every user for now)
*/ */
subscribe: boolean; subscribe: boolean;
}
}
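For reference, a minimal sketch of checking these capabilities before acting on them (not part of the commit). `session` and `connection` are assumed to exist already.

```typescript
import { Connection, Session } from 'openvidu-browser';

declare const session: Session; // assumed to be an already-connected Session
declare const connection: Connection; // assumed to be another participant's Connection

if (session.capabilities.forceDisconnect) {
    session.forceDisconnect(connection).catch((error) => console.error(error));
} else {
    console.warn('This token does not allow evicting other participants');
}
```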
View File
@ -19,7 +19,6 @@
* See [[OpenVidu.getDevices]] * See [[OpenVidu.getDevices]]
*/ */
export interface Device { export interface Device {
/** /**
* `"videoinput"`, `"audioinput"` * `"videoinput"`, `"audioinput"`
*/ */
@ -34,4 +33,4 @@ export interface Device {
* Description of the device. An empty string if the user hasn't granted permissions to the site to access the device * Description of the device. An empty string if the user hasn't granted permissions to the site to access the device
*/ */
label: string; label: string;
} }
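For reference, a minimal enumeration sketch (not part of the commit). The container id is illustrative, and `deviceId` is the Device property referenced by the PublisherProperties documentation further below.

```typescript
import { Device, OpenVidu } from 'openvidu-browser';

const OV = new OpenVidu();

async function pickCamera(): Promise<void> {
    const devices: Device[] = await OV.getDevices();
    const cameras = devices.filter((device) => device.kind === 'videoinput');
    if (cameras.length > 0) {
        // Use the first camera as the video source of a new Publisher ('video-container' is illustrative)
        OV.initPublisher('video-container', { videoSource: cameras[0].deviceId });
    }
}
```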
View File
@ -19,7 +19,6 @@
* See [[OpenVidu.setAdvancedConfiguration]] * See [[OpenVidu.setAdvancedConfiguration]]
*/ */
export interface OpenViduAdvancedConfiguration { export interface OpenViduAdvancedConfiguration {
/** /**
* Array of [RTCIceServer](https://developer.mozilla.org/en-US/docs/Web/API/RTCIceServer) to be used by OpenVidu Browser. By default OpenVidu will generate the required credentials to use the COTURN server hosted along OpenVidu Server * Array of [RTCIceServer](https://developer.mozilla.org/en-US/docs/Web/API/RTCIceServer) to be used by OpenVidu Browser. By default OpenVidu will generate the required credentials to use the COTURN server hosted along OpenVidu Server
* You can also set this property to string 'freeice' to force the use of free STUN servers instead (got thanks to [freeice](https://github.com/DamonOehlman/freeice) library). * You can also set this property to string 'freeice' to force the use of free STUN servers instead (got thanks to [freeice](https://github.com/DamonOehlman/freeice) library).
@ -36,7 +35,7 @@ export interface OpenViduAdvancedConfiguration {
* Custom configuration for the [[PublisherSpeakingEvent]] feature and the [StreamManagerEvent.streamAudioVolumeChange](/en/stable/api/openvidu-browser/classes/StreamManagerEvent.html) feature. It is an object which includes the following optional properties: * Custom configuration for the [[PublisherSpeakingEvent]] feature and the [StreamManagerEvent.streamAudioVolumeChange](/en/stable/api/openvidu-browser/classes/StreamManagerEvent.html) feature. It is an object which includes the following optional properties:
* - `interval`: (number) how frequently the analyser polls the audio stream to check if speaking has started/stopped or audio volume has changed. Default **100** (ms) * - `interval`: (number) how frequently the analyser polls the audio stream to check if speaking has started/stopped or audio volume has changed. Default **100** (ms)
* - `threshold`: (number) the volume at which _publisherStartSpeaking_ and _publisherStopSpeaking_ events will be fired. Default **-50** (dB) * - `threshold`: (number) the volume at which _publisherStartSpeaking_ and _publisherStopSpeaking_ events will be fired. Default **-50** (dB)
* *
* This sets the global default configuration that will affect all streams, but you can later customize these values for each specific stream by calling [[StreamManager.updatePublisherSpeakingEventsOptions]] * This sets the global default configuration that will affect all streams, but you can later customize these values for each specific stream by calling [[StreamManager.updatePublisherSpeakingEventsOptions]]
*/ */
publisherSpeakingEventsOptions?: { publisherSpeakingEventsOptions?: {
@ -47,10 +46,10 @@ export interface OpenViduAdvancedConfiguration {
/** /**
* Determines the automatic reconnection process policy. Whenever the client's network drops, OpenVidu Browser starts a reconnection process with OpenVidu Server. After network is recovered, OpenVidu Browser automatically * Determines the automatic reconnection process policy. Whenever the client's network drops, OpenVidu Browser starts a reconnection process with OpenVidu Server. After network is recovered, OpenVidu Browser automatically
* inspects all of its media streams to see their status. For any of them that are broken, it asks OpenVidu Server for a forced and silent reconnection. * inspects all of its media streams to see their status. For any of them that are broken, it asks OpenVidu Server for a forced and silent reconnection.
* *
* This policy is technically enough to recover any broken media connection after a network drop, but in practice it has been proven that OpenVidu Browser may think a media connection has properly recovered when in fact it has not. * This policy is technically enough to recover any broken media connection after a network drop, but in practice it has been proven that OpenVidu Browser may think a media connection has properly recovered when in fact it has not.
* This is not a common case, and it only affects Publisher streams, but it may occur. This property allows **forcing OpenVidu Browser to reconnect all of its outgoing media streams** after a network drop regardless of their supposed status. * This is not a common case, and it only affects Publisher streams, but it may occur. This property allows **forcing OpenVidu Browser to reconnect all of its outgoing media streams** after a network drop regardless of their supposed status.
* *
* Default to `false`. * Default to `false`.
*/ */
forceMediaReconnectionAfterNetworkDrop?: boolean; forceMediaReconnectionAfterNetworkDrop?: boolean;
@ -59,16 +58,15 @@ export interface OpenViduAdvancedConfiguration {
* The milliseconds that must elapse after triggering [[ExceptionEvent]] of name [`ICE_CONNECTION_DISCONNECTED`](/en/stable/api/openvidu-browser/enums/ExceptionEventName.html#ICE_CONNECTION_DISCONNECTED) to perform an automatic reconnection process of the affected media stream. * The milliseconds that must elapse after triggering [[ExceptionEvent]] of name [`ICE_CONNECTION_DISCONNECTED`](/en/stable/api/openvidu-browser/enums/ExceptionEventName.html#ICE_CONNECTION_DISCONNECTED) to perform an automatic reconnection process of the affected media stream.
* This automatic reconnection process can only take place if the client still has network connection to OpenVidu Server. If the ICE connection has broken because of a total network drop, * This automatic reconnection process can only take place if the client still has network connection to OpenVidu Server. If the ICE connection has broken because of a total network drop,
* then no reconnection process will be possible at all. * then no reconnection process will be possible at all.
* *
* Default to `4000`. * Default to `4000`.
*/ */
iceConnectionDisconnectedExceptionTimeout?: number; iceConnectionDisconnectedExceptionTimeout?: number;
/** /**
* The milliseconds that must elapse for the [[ExceptionEvent]] of name [`NO_STREAM_PLAYING_EVENT`](/en/stable/api/openvidu-browser/enums/ExceptionEventName.html#NO_STREAM_PLAYING_EVENT) to be fired. * The milliseconds that must elapse for the [[ExceptionEvent]] of name [`NO_STREAM_PLAYING_EVENT`](/en/stable/api/openvidu-browser/enums/ExceptionEventName.html#NO_STREAM_PLAYING_EVENT) to be fired.
* *
* Default to `4000`. * Default to `4000`.
*/ */
noStreamPlayingEventExceptionTimeout?: number; noStreamPlayingEventExceptionTimeout?: number;
} }
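For reference, a usage sketch of this interface (not part of the commit). The values shown are the defaults documented above, so in practice only the properties you want to change need to be set.

```typescript
import { OpenVidu } from 'openvidu-browser';

const OV = new OpenVidu();
OV.setAdvancedConfiguration({
    publisherSpeakingEventsOptions: {
        interval: 100, // ms between polls of the audio stream (default)
        threshold: -50 // dB volume that fires publisherStartSpeaking / publisherStopSpeaking (default)
    },
    forceMediaReconnectionAfterNetworkDrop: false, // default
    iceConnectionDisconnectedExceptionTimeout: 4000, // ms (default)
    noStreamPlayingEventExceptionTimeout: 4000 // ms (default)
});
```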
View File
@ -22,7 +22,6 @@ import { VideoInsertMode } from '../../Enums/VideoInsertMode';
* See [[OpenVidu.initPublisher]] * See [[OpenVidu.initPublisher]]
*/ */
export interface PublisherProperties { export interface PublisherProperties {
/** /**
* Which device should provide the audio source. Can be: * Which device should provide the audio source. Can be:
* - Property `deviceId` of a [[Device]] * - Property `deviceId` of a [[Device]]
@ -98,5 +97,4 @@ export interface PublisherProperties {
* Define a filter to apply in the Publisher's stream * Define a filter to apply in the Publisher's stream
*/ */
filter?: Filter; filter?: Filter;
} }
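For reference, a minimal Publisher initialization sketch (not part of the commit). The container id is illustrative, and the properties other than `audioSource` and `filter` are taken from the public PublisherProperties documentation rather than from this hunk.

```typescript
import { OpenVidu } from 'openvidu-browser';

const OV = new OpenVidu();
const publisher = OV.initPublisher('publisher-container', {
    audioSource: undefined, // default microphone
    videoSource: undefined, // default camera
    publishAudio: true,
    publishVideo: true,
    resolution: '640x480',
    frameRate: 30
});
```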
View File
@ -21,7 +21,6 @@ import { Connection } from '../../../OpenVidu/Connection';
* See [[Session.signal]] * See [[Session.signal]]
*/ */
export interface SignalOptions { export interface SignalOptions {
/** /**
* The actual message of the signal. * The actual message of the signal.
*/ */
@ -38,4 +37,4 @@ export interface SignalOptions {
* receive it. Participants subscribed to `Session.on('signal')` will receive all signals. * receive it. Participants subscribed to `Session.on('signal')` will receive all signals.
*/ */
type?: string; type?: string;
} }
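For reference, a minimal signal sketch (not part of the commit). `session` is assumed to be an already-connected Session and the signal type 'chat' is illustrative.

```typescript
import { Session } from 'openvidu-browser';

declare const session: Session; // assumed to be an already-connected Session

session
    .signal({
        data: 'Hello everyone!',
        to: [], // an empty array (or leaving it undefined) targets every participant
        type: 'chat' // receivers can filter with session.on('signal:chat', ...)
    })
    .then(() => console.log('Signal sent'))
    .catch((error) => console.error(error));
```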
View File
@ -17,9 +17,7 @@
import { VideoInsertMode } from '../../Enums/VideoInsertMode'; import { VideoInsertMode } from '../../Enums/VideoInsertMode';
export interface StreamManagerVideo { export interface StreamManagerVideo {
/** /**
* DOM video element displaying the StreamManager's stream * DOM video element displaying the StreamManager's stream
*/ */
@ -56,6 +54,4 @@ export interface StreamManagerVideo {
* @hidden * @hidden
*/ */
canplayListenerAdded: boolean; canplayListenerAdded: boolean;
}
}
View File
@ -21,7 +21,6 @@ import { VideoInsertMode } from '../../Enums/VideoInsertMode';
* See [[Session.subscribe]] * See [[Session.subscribe]]
*/ */
export interface SubscriberProperties { export interface SubscriberProperties {
/** /**
* How the video element of the subscriber should be inserted in the DOM * How the video element of the subscriber should be inserted in the DOM
* @default VideoInsertMode.APPEND * @default VideoInsertMode.APPEND
@ -39,5 +38,4 @@ export interface SubscriberProperties {
* @default true * @default true
*/ */
subscribeToVideo?: boolean; subscribeToVideo?: boolean;
}
}
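For reference, a minimal subscription sketch (not part of the commit). `session` is assumed to be an already-connected Session, the container id is illustrative, and the values shown are the documented defaults.

```typescript
import { Session, StreamEvent, VideoInsertMode } from 'openvidu-browser';

declare const session: Session; // assumed to be an already-connected Session

session.on('streamCreated', (event: StreamEvent) => {
    session.subscribe(event.stream, 'subscriber-container', {
        insertMode: VideoInsertMode.APPEND, // default
        subscribeToVideo: true // default
    });
});
```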
View File
@ -1,61 +1,52 @@
function Mapper() { function Mapper() {
var sources = {}; var sources = {};
this.forEach = function (callback) {
for (var key in sources) {
var source = sources[key];
this.forEach = function (callback) { for (var key2 in source) callback(source[key2]);
for (var key in sources) { }
var source = sources[key];
for (var key2 in source)
callback(source[key2]);
}; };
};
this.get = function (id, source) { this.get = function (id, source) {
var ids = sources[source]; var ids = sources[source];
if (ids == undefined) if (ids == undefined) return undefined;
return undefined;
return ids[id]; return ids[id];
}; };
this.remove = function (id, source) { this.remove = function (id, source) {
var ids = sources[source]; var ids = sources[source];
if (ids == undefined) if (ids == undefined) return;
return;
delete ids[id]; delete ids[id];
// Check it's empty // Check it's empty
for (var i in ids) { for (var i in ids) {
return false return false;
} }
delete sources[source]; delete sources[source];
}; };
this.set = function (value, id, source) { this.set = function (value, id, source) {
if (value == undefined) if (value == undefined) return this.remove(id, source);
return this.remove(id, source);
var ids = sources[source]; var ids = sources[source];
if (ids == undefined) if (ids == undefined) sources[source] = ids = {};
sources[source] = ids = {};
ids[id] = value;
};
};
ids[id] = value;
};
}
Mapper.prototype.pop = function (id, source) { Mapper.prototype.pop = function (id, source) {
var value = this.get(id, source); var value = this.get(id, source);
if (value == undefined) if (value == undefined) return undefined;
return undefined;
this.remove(id, source); this.remove(id, source);
return value; return value;
}; };
module.exports = Mapper;
module.exports = Mapper;
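For context, a small usage sketch of this helper (not part of the commit). Mapper indexes each value by an (id, source) pair; the require path is illustrative.

```typescript
// Hypothetical usage of the Mapper shown above (require path illustrative)
const Mapper = require('./Mapper');

const pending = new Mapper();
pending.set({ method: 'ping' }, 1, 'ws'); // (value, id, source)
pending.get(1, 'ws'); // -> { method: 'ping' }
pending.pop(1, 'ws'); // -> { method: 'ping' }, and removes the entry
pending.get(1, 'ws'); // -> undefined
```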
View File
@ -17,5 +17,4 @@
var JsonRpcClient = require('./jsonrpcclient'); var JsonRpcClient = require('./jsonrpcclient');
exports.JsonRpcClient = JsonRpcClient;
exports.JsonRpcClient = JsonRpcClient;
View File
@ -19,9 +19,11 @@ var RpcBuilder = require('../');
var WebSocketWithReconnection = require('./transports/webSocketWithReconnection'); var WebSocketWithReconnection = require('./transports/webSocketWithReconnection');
var OpenViduLogger = require('../../../Logger/OpenViduLogger').OpenViduLogger; var OpenViduLogger = require('../../../Logger/OpenViduLogger').OpenViduLogger;
Date.now = Date.now || function () { Date.now =
return +new Date; Date.now ||
}; function () {
return +new Date();
};
var PING_INTERVAL = 5000; var PING_INTERVAL = 5000;
@ -51,7 +53,6 @@ var Logger = OpenViduLogger.getInstance();
* </pre> * </pre>
*/ */
function JsonRpcClient(configuration) { function JsonRpcClient(configuration) {
var self = this; var self = this;
var wsConfig = configuration.ws; var wsConfig = configuration.ws;
@ -71,13 +72,13 @@ function JsonRpcClient(configuration) {
var onerror = wsConfig.onerror; var onerror = wsConfig.onerror;
configuration.rpc.pull = function (params, request) { configuration.rpc.pull = function (params, request) {
request.reply(null, "push"); request.reply(null, 'push');
} };
wsConfig.onreconnecting = function () { wsConfig.onreconnecting = function () {
Logger.debug("--------- ONRECONNECTING -----------"); Logger.debug('--------- ONRECONNECTING -----------');
if (status === RECONNECTING) { if (status === RECONNECTING) {
Logger.error("Websocket already in RECONNECTING state when receiving a new ONRECONNECTING message. Ignoring it"); Logger.error('Websocket already in RECONNECTING state when receiving a new ONRECONNECTING message. Ignoring it');
return; return;
} }
@ -87,12 +88,12 @@ function JsonRpcClient(configuration) {
if (onreconnecting) { if (onreconnecting) {
onreconnecting(); onreconnecting();
} }
} };
wsConfig.onreconnected = function () { wsConfig.onreconnected = function () {
Logger.debug("--------- ONRECONNECTED -----------"); Logger.debug('--------- ONRECONNECTED -----------');
if (status === CONNECTED) { if (status === CONNECTED) {
Logger.error("Websocket already in CONNECTED state when receiving a new ONRECONNECTED message. Ignoring it"); Logger.error('Websocket already in CONNECTED state when receiving a new ONRECONNECTED message. Ignoring it');
return; return;
} }
status = CONNECTED; status = CONNECTED;
@ -102,12 +103,12 @@ function JsonRpcClient(configuration) {
if (onreconnected) { if (onreconnected) {
onreconnected(); onreconnected();
} }
} };
wsConfig.onconnected = function () { wsConfig.onconnected = function () {
Logger.debug("--------- ONCONNECTED -----------"); Logger.debug('--------- ONCONNECTED -----------');
if (status === CONNECTED) { if (status === CONNECTED) {
Logger.error("Websocket already in CONNECTED state when receiving a new ONCONNECTED message. Ignoring it"); Logger.error('Websocket already in CONNECTED state when receiving a new ONCONNECTED message. Ignoring it');
return; return;
} }
status = CONNECTED; status = CONNECTED;
@ -118,10 +119,10 @@ function JsonRpcClient(configuration) {
if (onconnected) { if (onconnected) {
onconnected(); onconnected();
} }
} };
wsConfig.onerror = function (error) { wsConfig.onerror = function (error) {
Logger.debug("--------- ONERROR -----------"); Logger.debug('--------- ONERROR -----------');
status = DISCONNECTED; status = DISCONNECTED;
@ -130,7 +131,7 @@ function JsonRpcClient(configuration) {
if (onerror) { if (onerror) {
onerror(error); onerror(error);
} }
} };
var ws = new WebSocketWithReconnection(wsConfig); var ws = new WebSocketWithReconnection(wsConfig);
@ -141,37 +142,41 @@ function JsonRpcClient(configuration) {
ping_request_timeout: configuration.rpc.heartbeatRequestTimeout ping_request_timeout: configuration.rpc.heartbeatRequestTimeout
}; };
var rpc = new RpcBuilder(RpcBuilder.packers.JsonRPC, rpcBuilderOptions, ws, var rpc = new RpcBuilder(RpcBuilder.packers.JsonRPC, rpcBuilderOptions, ws, function (request) {
function (request) { Logger.debug('Received request: ' + JSON.stringify(request));
Logger.debug('Received request: ' + JSON.stringify(request)); try {
var func = configuration.rpc[request.method];
try { if (func === undefined) {
var func = configuration.rpc[request.method]; Logger.error('Method ' + request.method + ' not registered in client');
} else {
if (func === undefined) { func(request.params, request);
Logger.error("Method " + request.method + " not registered in client");
} else {
func(request.params, request);
}
} catch (err) {
Logger.error('Exception processing request: ' + JSON.stringify(request));
Logger.error(err);
} }
}); } catch (err) {
Logger.error('Exception processing request: ' + JSON.stringify(request));
Logger.error(err);
}
});
this.send = function (method, params, callback) { this.send = function (method, params, callback) {
var requestTime = Date.now(); var requestTime = Date.now();
rpc.encode(method, params, function (error, result) { rpc.encode(method, params, function (error, result) {
if (error) { if (error) {
try { try {
Logger.error("ERROR:" + error.message + " in Request: method:" + Logger.error(
method + " params:" + JSON.stringify(params) + " request:" + 'ERROR:' +
error.request); error.message +
' in Request: method:' +
method +
' params:' +
JSON.stringify(params) +
' request:' +
error.request
);
if (error.data) { if (error.data) {
Logger.error("ERROR DATA:" + JSON.stringify(error.data)); Logger.error('ERROR DATA:' + JSON.stringify(error.data));
} }
} catch (e) {} } catch (e) {}
error.requestTime = requestTime; error.requestTime = requestTime;
@ -183,11 +188,10 @@ function JsonRpcClient(configuration) {
callback(error, result); callback(error, result);
} }
}); });
} };
function updateNotReconnectIfLessThan() { function updateNotReconnectIfLessThan() {
Logger.debug("notReconnectIfNumLessThan = " + pingNextNum + ' (old=' + Logger.debug('notReconnectIfNumLessThan = ' + pingNextNum + ' (old=' + notReconnectIfNumLessThan + ')');
notReconnectIfNumLessThan + ')');
notReconnectIfNumLessThan = pingNextNum; notReconnectIfNumLessThan = pingNextNum;
} }
@ -201,23 +205,25 @@ function JsonRpcClient(configuration) {
} }
pingNextNum++; pingNextNum++;
self.send('ping', params, (function (pingNum) { self.send(
return function (error, result) { 'ping',
if (error) { params,
Logger.debug("Error in ping request #" + pingNum + " (" + (function (pingNum) {
error.message + ")"); return function (error, result) {
if (pingNum > notReconnectIfNumLessThan) { if (error) {
enabledPings = false; Logger.debug('Error in ping request #' + pingNum + ' (' + error.message + ')');
updateNotReconnectIfLessThan(); if (pingNum > notReconnectIfNumLessThan) {
Logger.debug("Server did not respond to ping message #" + enabledPings = false;
pingNum + ". Reconnecting... "); updateNotReconnectIfLessThan();
ws.reconnectWs(); Logger.debug('Server did not respond to ping message #' + pingNum + '. Reconnecting... ');
ws.reconnectWs();
}
} }
} };
} })(pingNextNum)
})(pingNextNum)); );
} else { } else {
Logger.debug("Trying to send ping, but ping is not enabled"); Logger.debug('Trying to send ping, but ping is not enabled');
} }
} }
@ -227,7 +233,7 @@ function JsonRpcClient(configuration) {
*/ */
function usePing() { function usePing() {
if (!pingPongStarted) { if (!pingPongStarted) {
Logger.debug("Starting ping (if configured)") Logger.debug('Starting ping (if configured)');
pingPongStarted = true; pingPongStarted = true;
if (configuration.heartbeat != undefined) { if (configuration.heartbeat != undefined) {
@ -246,30 +252,29 @@ function JsonRpcClient(configuration) {
} }
this.close = function (code, reason) { this.close = function (code, reason) {
Logger.debug("Closing with code: " + code + " because: " + reason); Logger.debug('Closing with code: ' + code + ' because: ' + reason);
if (pingInterval != undefined) { if (pingInterval != undefined) {
Logger.debug("Clearing ping interval"); Logger.debug('Clearing ping interval');
clearInterval(pingInterval); clearInterval(pingInterval);
} }
pingPongStarted = false; pingPongStarted = false;
enabledPings = false; enabledPings = false;
ws.close(code, reason); ws.close(code, reason);
} };
this.reconnect = function () { this.reconnect = function () {
ws.reconnectWs(); ws.reconnectWs();
} };
this.resetPing = function () { this.resetPing = function () {
enabledPings = true; enabledPings = true;
pingNextNum = 0; pingNextNum = 0;
usePing(); usePing();
} };
this.getReadyState = function () { this.getReadyState = function () {
return ws.getReadyState(); return ws.getReadyState();
} };
} }
module.exports = JsonRpcClient;
module.exports = JsonRpcClient;
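For context, a configuration sketch for this client (not part of the commit). The `ws` callback names, `rpc.heartbeatRequestTimeout` and `heartbeat` follow the code above; the `uri` field name, the URI value, `rpc.requestTimeout`, the extra RPC handler and the `send` method name are assumptions for illustration only.

```typescript
// Hypothetical configuration (URI, handler and some field names are assumptions)
const { JsonRpcClient } = require('./jsonrpcclient');

const client = new JsonRpcClient({
    heartbeat: 5000, // enables the ping/pong loop shown above
    ws: {
        uri: 'wss://example.com/openvidu', // illustrative
        onconnected: () => console.log('connected'),
        onreconnecting: () => console.log('reconnecting...'),
        onreconnected: () => console.log('reconnected'),
        onerror: (error) => console.error(error),
        ismasternodecrashed: () => false
    },
    rpc: {
        requestTimeout: 10000, // assumed field
        heartbeatRequestTimeout: 5000,
        // Server-initiated requests are dispatched by method name through configuration.rpc
        mediaError: (params) => console.warn(params) // illustrative handler
    }
});

client.send('echo', { msg: 'hi' }, (error, result) => {
    // 'echo' is an illustrative method name
    if (error) {
        console.error(error);
    } else {
        console.log(result);
    }
});
```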
View File
@ -17,4 +17,4 @@
var WebSocketWithReconnection = require('./webSocketWithReconnection'); var WebSocketWithReconnection = require('./webSocketWithReconnection');
exports.WebSocketWithReconnection = WebSocketWithReconnection; exports.WebSocketWithReconnection = WebSocketWithReconnection;
View File
@ -14,7 +14,7 @@
* limitations under the License. * limitations under the License.
*/ */
"use strict"; 'use strict';
var OpenViduLogger = require('../../../../Logger/OpenViduLogger').OpenViduLogger; var OpenViduLogger = require('../../../../Logger/OpenViduLogger').OpenViduLogger;
var Logger = OpenViduLogger.getInstance(); var Logger = OpenViduLogger.getInstance();
@ -45,17 +45,14 @@ function WebSocketWithReconnection(config) {
var ws = new WebSocket(wsUri); var ws = new WebSocket(wsUri);
ws.onopen = () => { ws.onopen = () => {
Logger.debug("WebSocket connected to " + wsUri); Logger.debug('WebSocket connected to ' + wsUri);
if (config.onconnected) { if (config.onconnected) {
config.onconnected(); config.onconnected();
} }
}; };
ws.onerror = error => { ws.onerror = (error) => {
Logger.error( Logger.error('Could not connect to ' + wsUri + ' (invoking onerror if defined)', error);
"Could not connect to " + wsUri + " (invoking onerror if defined)",
error
);
if (config.onerror) { if (config.onerror) {
config.onerror(error); config.onerror(error);
} }
@ -64,31 +61,27 @@ function WebSocketWithReconnection(config) {
var reconnectionOnClose = () => { var reconnectionOnClose = () => {
if (ws.readyState === CLOSED) { if (ws.readyState === CLOSED) {
if (closing) { if (closing) {
Logger.debug("Connection closed by user"); Logger.debug('Connection closed by user');
} else { } else {
if (config.ismasternodecrashed()) { if (config.ismasternodecrashed()) {
Logger.error("Master Node has crashed. Stopping reconnection process"); Logger.error('Master Node has crashed. Stopping reconnection process');
} else { } else {
Logger.debug("Connection closed unexpectedly. Reconnecting..."); Logger.debug('Connection closed unexpectedly. Reconnecting...');
reconnect(MAX_RETRIES, 1); reconnect(MAX_RETRIES, 1);
} }
} }
} else { } else {
Logger.debug("Close callback from previous websocket. Ignoring it"); Logger.debug('Close callback from previous websocket. Ignoring it');
} }
}; };
ws.onclose = reconnectionOnClose; ws.onclose = reconnectionOnClose;
function reconnect(maxRetries, numRetries) { function reconnect(maxRetries, numRetries) {
Logger.debug( Logger.debug('reconnect (attempt #' + numRetries + ', max=' + maxRetries + ')');
"reconnect (attempt #" + numRetries + ", max=" + maxRetries + ")"
);
if (numRetries === 1) { if (numRetries === 1) {
if (reconnecting) { if (reconnecting) {
Logger.warn( Logger.warn('Trying to reconnect when already reconnecting... Ignoring this reconnection.');
"Trying to reconnect when already reconnecting... Ignoring this reconnection."
);
return; return;
} else { } else {
reconnecting = true; reconnecting = true;
@ -101,24 +94,22 @@ function WebSocketWithReconnection(config) {
} }
function addReconnectionQueryParamsIfMissing(uriString) { function addReconnectionQueryParamsIfMissing(uriString) {
var searchParams = new URLSearchParams((new URL(uriString)).search); var searchParams = new URLSearchParams(new URL(uriString).search);
if (!searchParams.has("reconnect")) { if (!searchParams.has('reconnect')) {
uriString = (Array.from(searchParams).length > 0) ? (uriString + '&reconnect=true') : (uriString + '?reconnect=true'); uriString = Array.from(searchParams).length > 0 ? uriString + '&reconnect=true' : uriString + '?reconnect=true';
} }
return uriString; return uriString;
} }
function reconnectAux(maxRetries, numRetries) { function reconnectAux(maxRetries, numRetries) {
Logger.debug("Reconnection attempt #" + numRetries); Logger.debug('Reconnection attempt #' + numRetries);
ws.close(4104, 'Connection closed for reconnection'); ws.close(4104, 'Connection closed for reconnection');
wsUri = addReconnectionQueryParamsIfMissing(wsUri); wsUri = addReconnectionQueryParamsIfMissing(wsUri);
ws = new WebSocket(wsUri); ws = new WebSocket(wsUri);
ws.onopen = () => { ws.onopen = () => {
Logger.debug( Logger.debug('Reconnected to ' + wsUri + ' after ' + numRetries + ' attempts...');
"Reconnected to " + wsUri + " after " + numRetries + " attempts..."
);
reconnecting = false; reconnecting = false;
registerMessageHandler(); registerMessageHandler();
if (config.onreconnected()) { if (config.onreconnected()) {
@ -127,8 +118,8 @@ function WebSocketWithReconnection(config) {
ws.onclose = reconnectionOnClose; ws.onclose = reconnectionOnClose;
}; };
ws.onerror = error => { ws.onerror = (error) => {
Logger.warn("Reconnection error: ", error); Logger.warn('Reconnection error: ', error);
if (numRetries === maxRetries) { if (numRetries === maxRetries) {
if (config.ondisconnect) { if (config.ondisconnect) {
config.ondisconnect(); config.ondisconnect();
@ -147,11 +138,11 @@ function WebSocketWithReconnection(config) {
}; };
this.reconnectWs = () => { this.reconnectWs = () => {
Logger.debug("reconnectWs"); Logger.debug('reconnectWs');
reconnect(MAX_RETRIES, 1); reconnect(MAX_RETRIES, 1);
}; };
this.send = message => { this.send = (message) => {
ws.send(message); ws.send(message);
}; };
@ -164,7 +155,7 @@ function WebSocketWithReconnection(config) {
this.getReadyState = () => { this.getReadyState = () => {
return ws.readyState; return ws.readyState;
} };
} }
module.exports = WebSocketWithReconnection; module.exports = WebSocketWithReconnection;
View File
@ -11,39 +11,34 @@
* @return {String} - the stringified JsonRPC 2.0 message * @return {String} - the stringified JsonRPC 2.0 message
*/ */
function pack(message, id) { function pack(message, id) {
var result = { var result = {
jsonrpc: "2.0" jsonrpc: '2.0'
}; };
// Request // Request
if (message.method) { if (message.method) {
result.method = message.method; result.method = message.method;
if (message.params) if (message.params) result.params = message.params;
result.params = message.params;
// Request is a notification // Request is a notification
if (id != undefined) if (id != undefined) result.id = id;
result.id = id; }
}
// Response // Response
else if (id != undefined) { else if (id != undefined) {
if (message.error) { if (message.error) {
if (message.result !== undefined) if (message.result !== undefined) throw new TypeError('Both result and error are defined');
throw new TypeError("Both result and error are defined");
result.error = message.error; result.error = message.error;
} else if (message.result !== undefined) } else if (message.result !== undefined) result.result = message.result;
result.result = message.result; else throw new TypeError('No result or error is defined');
else
throw new TypeError("No result or error is defined");
result.id = id; result.id = id;
}; }
return JSON.stringify(result); return JSON.stringify(result);
}; }
/** /**
* Unpack a JsonRPC 2.0 message * Unpack a JsonRPC 2.0 message
@ -55,41 +50,36 @@ function pack(message, id) {
* @return {Object} - object filled with the JsonRPC 2.0 message content * @return {Object} - object filled with the JsonRPC 2.0 message content
*/ */
function unpack(message) { function unpack(message) {
var result = message; var result = message;
if (typeof message === 'string' || message instanceof String) { if (typeof message === 'string' || message instanceof String) {
result = JSON.parse(message); result = JSON.parse(message);
} }
// Check if it's a valid message // Check if it's a valid message
var version = result.jsonrpc; var version = result.jsonrpc;
if (version !== '2.0') if (version !== '2.0') throw new TypeError("Invalid JsonRPC version '" + version + "': " + message);
throw new TypeError("Invalid JsonRPC version '" + version + "': " + message);
// Response // Response
if (result.method == undefined) { if (result.method == undefined) {
if (result.id == undefined) if (result.id == undefined) throw new TypeError('Invalid message: ' + message);
throw new TypeError("Invalid message: " + message);
var result_defined = result.result !== undefined; var result_defined = result.result !== undefined;
var error_defined = result.error !== undefined; var error_defined = result.error !== undefined;
// Check only result or error is defined, not both or none // Check only result or error is defined, not both or none
if (result_defined && error_defined) if (result_defined && error_defined) throw new TypeError('Both result and error are defined: ' + message);
throw new TypeError("Both result and error are defined: " + message);
if (!result_defined && !error_defined) if (!result_defined && !error_defined) throw new TypeError('No result or error is defined: ' + message);
throw new TypeError("No result or error is defined: " + message);
result.ack = result.id; result.ack = result.id;
delete result.id; delete result.id;
} }
// Return unpacked message
return result;
};
// Return unpacked message
return result;
}
exports.pack = pack; exports.pack = pack;
exports.unpack = unpack; exports.unpack = unpack;
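For reference, a short worked example of the packer above (not part of the commit). The require path matches the one used by the packers index file; the method name and params are illustrative.

```typescript
const JsonRPC = require('./JsonRPC');

// A request that expects an answer (id is defined, so it is not a notification):
JsonRPC.pack({ method: 'ping', params: { interval: 5000 } }, 7);
// -> '{"jsonrpc":"2.0","method":"ping","params":{"interval":5000},"id":7}'

// Unpacking the matching response moves "id" into "ack":
JsonRPC.unpack('{"jsonrpc":"2.0","result":{"value":"pong"},"id":7}');
// -> { jsonrpc: '2.0', result: { value: 'pong' }, ack: 7 }
```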
View File
@ -1,10 +1,10 @@
function pack(message) { function pack(message) {
throw new TypeError("Not yet implemented"); throw new TypeError('Not yet implemented');
}; }
function unpack(message) { function unpack(message) {
throw new TypeError("Not yet implemented"); throw new TypeError('Not yet implemented');
}; }
exports.pack = pack; exports.pack = pack;
exports.unpack = unpack; exports.unpack = unpack;
View File
@ -1,6 +1,5 @@
var JsonRPC = require('./JsonRPC'); var JsonRPC = require('./JsonRPC');
var XmlRPC = require('./XmlRPC'); var XmlRPC = require('./XmlRPC');
exports.JsonRPC = JsonRPC; exports.JsonRPC = JsonRPC;
exports.XmlRPC = XmlRPC; exports.XmlRPC = XmlRPC;
View File
@ -1,42 +1,41 @@
type ConsoleFunction = (...data: any) => void; type ConsoleFunction = (...data: any) => void;
export class ConsoleLogger { export class ConsoleLogger {
/**
* @hidden
*/
logger: Console;
/** /**
* @hidden * @hidden
*/ */
logger: Console log: ConsoleFunction;
/** /**
* @hidden * @hidden
*/ */
log: ConsoleFunction info: ConsoleFunction;
/** /**
* @hidden * @hidden
*/ */
info: ConsoleFunction debug: ConsoleFunction;
/** /**
* @hidden * @hidden
*/ */
debug: ConsoleFunction warn: ConsoleFunction;
/** /**
* @hidden * @hidden
*/ */
warn: ConsoleFunction error: ConsoleFunction;
/**
* @hidden
*/
error: ConsoleFunction
constructor(console: Console) { constructor(console: Console) {
this.logger = console; this.logger = console;
this.log = console.log, (this.log = console.log),
this.info = console.info, (this.info = console.info),
this.debug = console.debug, (this.debug = console.debug),
this.warn = console.warn, (this.warn = console.warn),
this.error = console.error (this.error = console.error);
} }
} }
View File
@ -1,283 +1,285 @@
import { JL } from 'jsnlog' import { JL } from 'jsnlog';
import { OpenVidu } from "../../OpenVidu/OpenVidu"; import { OpenVidu } from '../../OpenVidu/OpenVidu';
import { ConsoleLogger } from './ConsoleLogger'; import { ConsoleLogger } from './ConsoleLogger';
import { OpenViduLoggerConfiguration } from "./OpenViduLoggerConfiguration"; import { OpenViduLoggerConfiguration } from './OpenViduLoggerConfiguration';
export class OpenViduLogger { export class OpenViduLogger {
private static instance: OpenViduLogger;
private static instance: OpenViduLogger; private JSNLOG_URL: string = '/openvidu/elk/openvidu-browser-logs';
private MAX_JSNLOG_BATCH_LOG_MESSAGES: number = 100;
private MAX_MSECONDS_BATCH_MESSAGES: number = 5000;
private MAX_LENGTH_STRING_JSON: number = 1000;
private JSNLOG_URL: string = "/openvidu/elk/openvidu-browser-logs"; private defaultConsoleLogger: ConsoleLogger = new ConsoleLogger(globalThis.console);
private MAX_JSNLOG_BATCH_LOG_MESSAGES: number = 100;
private MAX_MSECONDS_BATCH_MESSAGES: number = 5000;
private MAX_LENGTH_STRING_JSON: number = 1000;
private defaultConsoleLogger: ConsoleLogger = new ConsoleLogger(globalThis.console); private currentAppender: any;
private currentAppender: any; private isProdMode = false;
private isJSNLogSetup = false;
private isProdMode = false; // This two variables are used to restart JSNLog
private isJSNLogSetup = false; // on different sessions and different userIds
private loggingSessionId: string | undefined;
// This two variables are used to restart JSNLog /**
// on different sessions and different userIds * @hidden
private loggingSessionId: string | undefined; */
static configureJSNLog(openVidu: OpenVidu, token: string) {
try {
// If dev mode or...
if (
globalThis['LOG_JSNLOG_RESULTS'] ||
// If instance is created and it is OpenVidu Pro
(this.instance &&
openVidu.isAtLeastPro &&
// If logs are enabled
this.instance.isOpenViduBrowserLogsDebugActive(openVidu) &&
// Only reconfigure it if session or finalUserId has changed
this.instance.canConfigureJSNLog(openVidu, this.instance))
) {
// Check if app logs can be sent
// and replace console.log function to send
// logs of the application
if (openVidu.sendBrowserLogs === OpenViduLoggerConfiguration.debug_app) {
this.instance.replaceWindowConsole();
}
/** // isJSNLogSetup will not be true until completed setup
* @hidden this.instance.isJSNLogSetup = false;
*/ this.instance.info('Configuring JSNLogs.');
static configureJSNLog(openVidu: OpenVidu, token: string) {
try {
// If dev mode or...
if ((globalThis['LOG_JSNLOG_RESULTS']) ||
// If instance is created and it is OpenVidu Pro
(this.instance && openVidu.isAtLeastPro
// If logs are enabled
&& this.instance.isOpenViduBrowserLogsDebugActive(openVidu)
// Only reconfigure it if session or finalUserId has changed
&& this.instance.canConfigureJSNLog(openVidu, this.instance))) {
// Check if app logs can be sent const finalUserId = openVidu.finalUserId;
// and replace console.log function to send const sessionId = openVidu.session.sessionId;
// logs of the application
if (openVidu.sendBrowserLogs === OpenViduLoggerConfiguration.debug_app) {
this.instance.replaceWindowConsole();
}
// isJSNLogSetup will not be true until completed setup const beforeSendCallback = (xhr) => {
this.instance.isJSNLogSetup = false; // If 401 or 403 or 404 modify ready and status so JSNLog don't retry to send logs
this.instance.info("Configuring JSNLogs."); // https://github.com/mperdeck/jsnlog.js/blob/v2.30.0/jsnlog.ts#L805-L818
const parentReadyStateFunction = xhr.onreadystatechange;
xhr.onreadystatechange = () => {
if (this.isInvalidResponse(xhr)) {
Object.defineProperty(xhr, 'readyState', { value: 4 });
Object.defineProperty(xhr, 'status', { value: 200 });
// Disable JSNLog too to not send periodically errors
this.instance.disableLogger();
}
parentReadyStateFunction();
};
const finalUserId = openVidu.finalUserId; // Headers to identify and authenticate logs
const sessionId = openVidu.session.sessionId; xhr.setRequestHeader('Authorization', 'Basic ' + btoa(`${finalUserId}%/%${sessionId}` + ':' + token));
xhr.setRequestHeader('X-Requested-With', 'XMLHttpRequest');
// Additional headers for OpenVidu
xhr.setRequestHeader('OV-Final-User-Id', finalUserId);
xhr.setRequestHeader('OV-Session-Id', sessionId);
xhr.setRequestHeader('OV-Token', token);
};
const beforeSendCallback = (xhr) => { // Creation of the appender.
// If 401 or 403 or 404 modify ready and status so JSNLog don't retry to send logs this.instance.currentAppender = JL.createAjaxAppender(`appender-${finalUserId}-${sessionId}`);
// https://github.com/mperdeck/jsnlog.js/blob/v2.30.0/jsnlog.ts#L805-L818 this.instance.currentAppender.setOptions({
const parentReadyStateFunction = xhr.onreadystatechange; beforeSend: beforeSendCallback,
xhr.onreadystatechange = () => { maxBatchSize: 1000,
if (this.isInvalidResponse(xhr)) { batchSize: this.instance.MAX_JSNLOG_BATCH_LOG_MESSAGES,
Object.defineProperty(xhr, "readyState", { value: 4 }); batchTimeout: this.instance.MAX_MSECONDS_BATCH_MESSAGES
Object.defineProperty(xhr, "status", { value: 200 }); });
// Disable JSNLog too to not send periodically errors
this.instance.disableLogger();
}
parentReadyStateFunction();
}
// Headers to identify and authenticate logs // Avoid circular dependencies
xhr.setRequestHeader('Authorization', "Basic " + btoa(`${finalUserId}%/%${sessionId}` + ":" + token)); const logSerializer = (obj): string => {
xhr.setRequestHeader('X-Requested-With', 'XMLHttpRequest') const getCircularReplacer = () => {
// Additional headers for OpenVidu const seen = new WeakSet();
xhr.setRequestHeader('OV-Final-User-Id', finalUserId); return (key, value) => {
xhr.setRequestHeader('OV-Session-Id', sessionId); if (typeof value === 'object' && value != null) {
xhr.setRequestHeader('OV-Token', token); if (seen.has(value) || (globalThis.HTMLElement && value instanceof HTMLElement)) {
} return;
}
seen.add(value);
}
return value;
};
};
// Creation of the appender. // Cut long messages
this.instance.currentAppender = JL.createAjaxAppender(`appender-${finalUserId}-${sessionId}`); let stringifyJson = JSON.stringify(obj, getCircularReplacer());
this.instance.currentAppender.setOptions({ if (stringifyJson.length > this.instance.MAX_LENGTH_STRING_JSON) {
beforeSend: beforeSendCallback, stringifyJson = `${stringifyJson.substring(0, this.instance.MAX_LENGTH_STRING_JSON)}...`;
maxBatchSize: 1000, }
batchSize: this.instance.MAX_JSNLOG_BATCH_LOG_MESSAGES,
batchTimeout: this.instance.MAX_MSECONDS_BATCH_MESSAGES
});
// Avoid circular dependencies if (globalThis['LOG_JSNLOG_RESULTS']) {
const logSerializer = (obj): string => { console.log(stringifyJson);
const getCircularReplacer = () => { }
const seen = new WeakSet();
return (key, value) => {
if (typeof value === "object" && value != null) {
if (seen.has(value) || (globalThis.HTMLElement && value instanceof HTMLElement)) {
return;
}
seen.add(value);
}
return value;
};
};
// Cut long messages return stringifyJson;
let stringifyJson = JSON.stringify(obj, getCircularReplacer()); };
if (stringifyJson.length > this.instance.MAX_LENGTH_STRING_JSON) {
stringifyJson = `${stringifyJson.substring(0, this.instance.MAX_LENGTH_STRING_JSON)}...`;
}
if (globalThis['LOG_JSNLOG_RESULTS']) { // Initialize JL to send logs
console.log(stringifyJson); JL.setOptions({
} defaultAjaxUrl: openVidu.httpUri + this.instance.JSNLOG_URL,
serialize: logSerializer,
enabled: true
});
JL().setOptions({
appenders: [this.instance.currentAppender]
});
return stringifyJson; this.instance.isJSNLogSetup = true;
}; this.instance.loggingSessionId = sessionId;
this.instance.info('JSNLog configured.');
}
} catch (e) {
// Print error
console.error('Error configuring JSNLog: ');
console.error(e);
// Restore defaults values just in case any exception happen-
this.instance.disableLogger();
}
}
// Initialize JL to send logs /**
JL.setOptions({ * @hidden
defaultAjaxUrl: openVidu.httpUri + this.instance.JSNLOG_URL, */
serialize: logSerializer, static getInstance(): OpenViduLogger {
enabled: true if (!OpenViduLogger.instance) {
}); OpenViduLogger.instance = new OpenViduLogger();
JL().setOptions({ }
appenders: [this.instance.currentAppender] return OpenViduLogger.instance;
}); }
this.instance.isJSNLogSetup = true; private static isInvalidResponse(xhr: XMLHttpRequest) {
this.instance.loggingSessionId = sessionId; return xhr.status == 401 || xhr.status == 403 || xhr.status == 404 || xhr.status == 0;
this.instance.info("JSNLog configured."); }
}
} catch (e) {
// Print error
console.error("Error configuring JSNLog: ");
console.error(e);
// Restore defaults values just in case any exception happen-
this.instance.disableLogger();
}
}
/** private canConfigureJSNLog(openVidu: OpenVidu, logger: OpenViduLogger): boolean {
* @hidden return openVidu.session.sessionId != logger.loggingSessionId;
*/ }
static getInstance(): OpenViduLogger {
if (!OpenViduLogger.instance) {
OpenViduLogger.instance = new OpenViduLogger();
}
return OpenViduLogger.instance;
}
private static isInvalidResponse(xhr: XMLHttpRequest) { private isOpenViduBrowserLogsDebugActive(openVidu: OpenVidu) {
return xhr.status == 401 || xhr.status == 403 || xhr.status == 404 || xhr.status == 0; return (
} openVidu.sendBrowserLogs === OpenViduLoggerConfiguration.debug ||
openVidu.sendBrowserLogs === OpenViduLoggerConfiguration.debug_app
);
}
private canConfigureJSNLog(openVidu: OpenVidu, logger: OpenViduLogger): boolean { // Return console functions with jsnlog integration
return openVidu.session.sessionId != logger.loggingSessionId private getConsoleWithJSNLog() {
} return (function (openViduLogger: OpenViduLogger) {
return {
log: function (...args) {
openViduLogger.defaultConsoleLogger.log.apply(openViduLogger.defaultConsoleLogger.logger, arguments);
if (openViduLogger.isJSNLogSetup) {
JL().info(arguments);
}
},
info: function (...args) {
openViduLogger.defaultConsoleLogger.info.apply(openViduLogger.defaultConsoleLogger.logger, arguments);
if (openViduLogger.isJSNLogSetup) {
JL().info(arguments);
}
},
debug: function (...args) {
openViduLogger.defaultConsoleLogger.debug.apply(openViduLogger.defaultConsoleLogger.logger, arguments);
},
warn: function (...args) {
openViduLogger.defaultConsoleLogger.warn.apply(openViduLogger.defaultConsoleLogger.logger, arguments);
if (openViduLogger.isJSNLogSetup) {
JL().warn(arguments);
}
},
error: function (...args) {
openViduLogger.defaultConsoleLogger.error.apply(openViduLogger.defaultConsoleLogger.logger, arguments);
if (openViduLogger.isJSNLogSetup) {
JL().error(arguments);
}
}
};
})(this);
}
private isOpenViduBrowserLogsDebugActive(openVidu: OpenVidu) { private replaceWindowConsole() {
return openVidu.sendBrowserLogs === OpenViduLoggerConfiguration.debug || globalThis.console = this.defaultConsoleLogger.logger;
openVidu.sendBrowserLogs === OpenViduLoggerConfiguration.debug_app; globalThis.console.log = this.getConsoleWithJSNLog().log;
} globalThis.console.info = this.getConsoleWithJSNLog().info;
globalThis.console.debug = this.getConsoleWithJSNLog().debug;
globalThis.console.warn = this.getConsoleWithJSNLog().warn;
globalThis.console.error = this.getConsoleWithJSNLog().error;
}
// Return console functions with jsnlog integration private disableLogger() {
private getConsoleWithJSNLog() { JL.setOptions({ enabled: false });
return function (openViduLogger: OpenViduLogger) { this.isJSNLogSetup = false;
return { this.loggingSessionId = undefined;
log: function (...args) { this.currentAppender = undefined;
openViduLogger.defaultConsoleLogger.log.apply(openViduLogger.defaultConsoleLogger.logger, arguments); globalThis.console = this.defaultConsoleLogger.logger;
if (openViduLogger.isJSNLogSetup) { globalThis.console.log = this.defaultConsoleLogger.log;
JL().info(arguments); globalThis.console.info = this.defaultConsoleLogger.info;
} globalThis.console.debug = this.defaultConsoleLogger.debug;
}, globalThis.console.warn = this.defaultConsoleLogger.warn;
info: function (...args) { globalThis.console.error = this.defaultConsoleLogger.error;
openViduLogger.defaultConsoleLogger.info.apply(openViduLogger.defaultConsoleLogger.logger, arguments); }
if (openViduLogger.isJSNLogSetup) {
JL().info(arguments);
}
},
debug: function (...args) {
openViduLogger.defaultConsoleLogger.debug.apply(openViduLogger.defaultConsoleLogger.logger, arguments);
},
warn: function (...args) {
openViduLogger.defaultConsoleLogger.warn.apply(openViduLogger.defaultConsoleLogger.logger, arguments);
if (openViduLogger.isJSNLogSetup) {
JL().warn(arguments);
}
},
error: function (...args) {
openViduLogger.defaultConsoleLogger.error.apply(openViduLogger.defaultConsoleLogger.logger, arguments);
if (openViduLogger.isJSNLogSetup) {
JL().error(arguments);
}
}
};
}(this);
}
private replaceWindowConsole() { /**
globalThis.console = this.defaultConsoleLogger.logger; * @hidden
globalThis.console.log = this.getConsoleWithJSNLog().log; */
globalThis.console.info = this.getConsoleWithJSNLog().info; log(...args: any[]) {
globalThis.console.debug = this.getConsoleWithJSNLog().debug; if (!this.isProdMode) {
globalThis.console.warn = this.getConsoleWithJSNLog().warn; this.defaultConsoleLogger.log.apply(this.defaultConsoleLogger.logger, arguments);
globalThis.console.error = this.getConsoleWithJSNLog().error; }
} if (this.isJSNLogSetup) {
JL().info(arguments);
}
}
private disableLogger() { /**
JL.setOptions({ enabled: false }); * @hidden
this.isJSNLogSetup = false; */
this.loggingSessionId = undefined; debug(...args: any[]) {
this.currentAppender = undefined; if (!this.isProdMode) {
globalThis.console = this.defaultConsoleLogger.logger; this.defaultConsoleLogger.debug.apply(this.defaultConsoleLogger.logger, arguments);
globalThis.console.log = this.defaultConsoleLogger.log; }
globalThis.console.info = this.defaultConsoleLogger.info; }
globalThis.console.debug = this.defaultConsoleLogger.debug;
globalThis.console.warn = this.defaultConsoleLogger.warn;
globalThis.console.error = this.defaultConsoleLogger.error;
}
/** /**
* @hidden * @hidden
*/ */
log(...args: any[]) { info(...args: any[]) {
if (!this.isProdMode) { if (!this.isProdMode) {
this.defaultConsoleLogger.log.apply(this.defaultConsoleLogger.logger, arguments); this.defaultConsoleLogger.info.apply(this.defaultConsoleLogger.logger, arguments);
} }
if (this.isJSNLogSetup) { if (this.isJSNLogSetup) {
JL().info(arguments); JL().info(arguments);
} }
} }
/** /**
* @hidden * @hidden
*/ */
debug(...args: any[]) { warn(...args: any[]) {
if (!this.isProdMode) { this.defaultConsoleLogger.warn.apply(this.defaultConsoleLogger.logger, arguments);
this.defaultConsoleLogger.debug.apply(this.defaultConsoleLogger.logger, arguments); if (this.isJSNLogSetup) {
} JL().warn(arguments);
} }
}
/** /**
* @hidden * @hidden
*/ */
info(...args: any[]) { error(...args: any[]) {
if (!this.isProdMode) { this.defaultConsoleLogger.error.apply(this.defaultConsoleLogger.logger, arguments);
this.defaultConsoleLogger.info.apply(this.defaultConsoleLogger.logger, arguments); if (this.isJSNLogSetup) {
} JL().error(arguments);
if (this.isJSNLogSetup) { }
JL().info(arguments); }
}
}
/** /**
* @hidden * @hidden
*/ */
warn(...args: any[]) { flush() {
this.defaultConsoleLogger.warn.apply(this.defaultConsoleLogger.logger, arguments); if (this.isJSNLogSetup && this.currentAppender != null) {
if (this.isJSNLogSetup) { this.currentAppender.sendBatch();
JL().warn(arguments); }
} }
}
/**
* @hidden
*/
error(...args: any[]) {
this.defaultConsoleLogger.error.apply(this.defaultConsoleLogger.logger, arguments);
if (this.isJSNLogSetup) {
JL().error(arguments);
}
}
/**
* @hidden
*/
flush() {
if (this.isJSNLogSetup && this.currentAppender != null) {
this.currentAppender.sendBatch();
}
}
enableProdMode() {
this.isProdMode = true;
}
enableProdMode() {
this.isProdMode = true;
}
} }
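For context, a standalone sketch of the serialization pattern configureJSNLog installs above: a WeakSet-based replacer drops circular references and DOM elements, and the resulting JSON string is truncated to a maximum length. Not part of the commit; names are simplified.

```typescript
const MAX_LENGTH_STRING_JSON = 1000;

function safeSerialize(obj: any): string {
    const seen = new WeakSet();
    const replacer = (key: string, value: any) => {
        if (typeof value === 'object' && value != null) {
            if (seen.has(value) || (globalThis.HTMLElement && value instanceof HTMLElement)) {
                return undefined; // drop circular references and DOM nodes
            }
            seen.add(value);
        }
        return value;
    };
    let json = JSON.stringify(obj, replacer);
    if (json.length > MAX_LENGTH_STRING_JSON) {
        json = `${json.substring(0, MAX_LENGTH_STRING_JSON)}...`;
    }
    return json;
}

// A self-referencing object no longer breaks JSON.stringify:
const entry: any = { level: 'info' };
entry.self = entry;
safeSerialize(entry); // -> '{"level":"info"}'
```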
View File
@ -2,4 +2,4 @@ export enum OpenViduLoggerConfiguration {
disabled = 'disabled', disabled = 'disabled',
debug = 'debug', debug = 'debug',
debug_app = 'debug_app' debug_app = 'debug_app'
} }
View File
@ -54,7 +54,11 @@ globalThis.getScreenId = function (firefoxString, callback, custom_parameter) {
if (event.data.chromeMediaSourceId === 'PermissionDeniedError') { if (event.data.chromeMediaSourceId === 'PermissionDeniedError') {
callback('permission-denied'); callback('permission-denied');
} else { } else {
callback(null, event.data.chromeMediaSourceId, getScreenConstraints(null, event.data.chromeMediaSourceId, event.data.canRequestAudioTrack)); callback(
null,
event.data.chromeMediaSourceId,
getScreenConstraints(null, event.data.chromeMediaSourceId, event.data.canRequestAudioTrack)
);
} }
// this event listener is no more needed // this event listener is no more needed
@ -71,8 +75,7 @@ globalThis.getScreenId = function (firefoxString, callback, custom_parameter) {
if (!custom_parameter) { if (!custom_parameter) {
setTimeout(postGetSourceIdMessage, 100); setTimeout(postGetSourceIdMessage, 100);
} } else {
else {
setTimeout(function () { setTimeout(function () {
postGetSourceIdMessage(custom_parameter); postGetSourceIdMessage(custom_parameter);
}, 100); }, 100);
@ -95,7 +98,7 @@ function getScreenConstraints(error, sourceId, canRequestAudioTrack) {
if (!!canRequestAudioTrack) { if (!!canRequestAudioTrack) {
screen_constraints.audio = { screen_constraints.audio = {
mandatory: { mandatory: {
chromeMediaSource: error ? 'screen' : 'desktop', chromeMediaSource: error ? 'screen' : 'desktop'
// echoCancellation: true // echoCancellation: true
}, },
optional: [] optional: []
@ -129,19 +132,26 @@ function postGetSourceIdMessage(custom_parameter) {
} }
if (!custom_parameter) { if (!custom_parameter) {
iframe.contentWindow.postMessage({ iframe.contentWindow.postMessage(
captureSourceId: true {
}, '*'); captureSourceId: true
} },
else if (!!custom_parameter.forEach) { '*'
iframe.contentWindow.postMessage({ );
captureCustomSourceId: custom_parameter } else if (!!custom_parameter.forEach) {
}, '*'); iframe.contentWindow.postMessage(
} {
else { captureCustomSourceId: custom_parameter
iframe.contentWindow.postMessage({ },
captureSourceIdWithAudio: true '*'
}, '*'); );
} else {
iframe.contentWindow.postMessage(
{
captureSourceIdWithAudio: true
},
'*'
);
} }
} }
@ -212,9 +222,12 @@ function postGetChromeExtensionStatusMessage() {
return; return;
} }
iframe.contentWindow.postMessage({ iframe.contentWindow.postMessage(
getChromeExtensionStatus: true {
}, '*'); getChromeExtensionStatus: true
},
'*'
);
} }
exports.getScreenId = globalThis.getScreenId; exports.getScreenId = globalThis.getScreenId;
View File
@ -3,7 +3,7 @@ var chromeMediaSource = 'screen';
var sourceId; var sourceId;
var screenCallback; var screenCallback;
if(typeof window !== 'undefined' && typeof navigator !== 'undefined' && typeof navigator.userAgent !== 'undefined'){ if (typeof window !== 'undefined' && typeof navigator !== 'undefined' && typeof navigator.userAgent !== 'undefined') {
var isFirefox = typeof window.InstallTrigger !== 'undefined'; var isFirefox = typeof window.InstallTrigger !== 'undefined';
var isOpera = !!window.opera || navigator.userAgent.indexOf(' OPR/') >= 0; var isOpera = !!window.opera || navigator.userAgent.indexOf(' OPR/') >= 0;
var isChrome = !!window.chrome && !isOpera; var isChrome = !!window.chrome && !isOpera;
@ -20,10 +20,8 @@ if(typeof window !== 'undefined' && typeof navigator !== 'undefined' && typeof n
function onMessageCallback(data) { function onMessageCallback(data) {
// "cancel" button is clicked // "cancel" button is clicked
if (data == 'PermissionDeniedError') { if (data == 'PermissionDeniedError') {
if (screenCallback) if (screenCallback) return screenCallback('PermissionDeniedError');
return screenCallback('PermissionDeniedError'); else throw new Error('PermissionDeniedError');
else
throw new Error('PermissionDeniedError');
} }
// extension notified his presence // extension notified his presence
if (data == 'rtcmulticonnection-extension-loaded') { if (data == 'rtcmulticonnection-extension-loaded') {
@ -31,7 +29,7 @@ function onMessageCallback(data) {
} }
// extension shared temp sourceId // extension shared temp sourceId
if (data.sourceId && screenCallback) { if (data.sourceId && screenCallback) {
screenCallback(sourceId = data.sourceId, data.canRequestAudioTrack === true); screenCallback((sourceId = data.sourceId), data.canRequestAudioTrack === true);
} }
} }
@ -51,10 +49,8 @@ function isChromeExtensionAvailable(callback) {
// this function can be used to get "source-id" from the extension // this function can be used to get "source-id" from the extension
function getSourceId(callback) { function getSourceId(callback) {
if (!callback) if (!callback) throw '"callback" parameter is mandatory.';
throw '"callback" parameter is mandatory.'; if (sourceId) return callback(sourceId);
if (sourceId)
return callback(sourceId);
screenCallback = callback; screenCallback = callback;
window.postMessage('get-sourceId', '*'); window.postMessage('get-sourceId', '*');
} }
@ -67,9 +63,12 @@ function getCustomSourceId(arr, callback) {
if (sourceId) return callback(sourceId); if (sourceId) return callback(sourceId);
screenCallback = callback; screenCallback = callback;
window.postMessage({ window.postMessage(
'get-custom-sourceId': arr {
}, '*'); 'get-custom-sourceId': arr
},
'*'
);
} }
// this function can be used to get "source-id" from the extension // this function can be used to get "source-id" from the extension
@ -82,8 +81,7 @@ function getSourceIdWithAudio(callback) {
} }
function getChromeExtensionStatus(extensionid, callback) { function getChromeExtensionStatus(extensionid, callback) {
if (isFirefox) if (isFirefox) return callback('not-chrome');
return callback('not-chrome');
if (arguments.length != 2) { if (arguments.length != 2) {
callback = extensionid; callback = extensionid;
extensionid = 'lfcgfepafnobdloecchnfaclibenjold'; // default extension-id extensionid = 'lfcgfepafnobdloecchnfaclibenjold'; // default extension-id
@ -96,8 +94,7 @@ function getChromeExtensionStatus(extensionid, callback) {
setTimeout(function () { setTimeout(function () {
if (chromeMediaSource == 'screen') { if (chromeMediaSource == 'screen') {
callback('installed-disabled'); callback('installed-disabled');
} else } else callback('installed-enabled');
callback('installed-enabled');
}, 2000); }, 2000);
}; };
image.onerror = function () { image.onerror = function () {
@ -116,8 +113,7 @@ function getScreenConstraints(callback, captureSourceIdWithAudio) {
mozMediaSource: 'window', mozMediaSource: 'window',
mediaSource: 'window' mediaSource: 'window'
}; };
if (isFirefox) if (isFirefox) return callback(null, firefoxScreenConstraints);
return callback(null, firefoxScreenConstraints);
// this statement defines getUserMedia constraints // this statement defines getUserMedia constraints
// that will be used to capture content of screen // that will be used to capture content of screen
var screen_constraints = { var screen_constraints = {
@ -141,8 +137,7 @@ function getScreenConstraints(callback, captureSourceIdWithAudio) {
} }
callback(sourceId == 'PermissionDeniedError' ? sourceId : null, screen_constraints); callback(sourceId == 'PermissionDeniedError' ? sourceId : null, screen_constraints);
}); });
} } else {
else {
getSourceId(function (sourceId) { getSourceId(function (sourceId) {
screen_constraints.mandatory.chromeMediaSourceId = sourceId; screen_constraints.mandatory.chromeMediaSourceId = sourceId;
callback(sourceId == 'PermissionDeniedError' ? sourceId : null, screen_constraints); callback(sourceId == 'PermissionDeniedError' ? sourceId : null, screen_constraints);
@ -164,4 +159,4 @@ exports.getScreenConstraints = getScreenConstraints;
exports.getScreenConstraintsWithAudio = getScreenConstraintsWithAudio; exports.getScreenConstraintsWithAudio = getScreenConstraintsWithAudio;
exports.isChromeExtensionAvailable = isChromeExtensionAvailable; exports.isChromeExtensionAvailable = isChromeExtensionAvailable;
exports.getChromeExtensionStatus = getChromeExtensionStatus; exports.getChromeExtensionStatus = getChromeExtensionStatus;
exports.getSourceId = getSourceId; exports.getSourceId = getSourceId;
View File
@ -1,222 +1,221 @@
import platform = require('platform'); import platform = require('platform');
export class PlatformUtils { export class PlatformUtils {
protected static instance: PlatformUtils; protected static instance: PlatformUtils;
constructor() { } constructor() {}
static getInstance(): PlatformUtils { static getInstance(): PlatformUtils {
if (!this.instance) { if (!this.instance) {
this.instance = new PlatformUtils(); this.instance = new PlatformUtils();
} }
return PlatformUtils.instance; return PlatformUtils.instance;
} }
public isChromeBrowser(): boolean { public isChromeBrowser(): boolean {
return platform.name === "Chrome"; return platform.name === 'Chrome';
} }
/** /**
* @hidden * @hidden
*/ */
public isSafariBrowser(): boolean { public isSafariBrowser(): boolean {
return platform.name === "Safari"; return platform.name === 'Safari';
} }
/** /**
* @hidden * @hidden
*/ */
public isChromeMobileBrowser(): boolean { public isChromeMobileBrowser(): boolean {
return platform.name === "Chrome Mobile"; return platform.name === 'Chrome Mobile';
} }
/** /**
* @hidden * @hidden
*/ */
public isFirefoxBrowser(): boolean { public isFirefoxBrowser(): boolean {
return platform.name === "Firefox"; return platform.name === 'Firefox';
} }
/** /**
* @hidden * @hidden
*/ */
public isFirefoxMobileBrowser(): boolean { public isFirefoxMobileBrowser(): boolean {
return platform.name === "Firefox Mobile" || platform.name === "Firefox for iOS"; return platform.name === 'Firefox Mobile' || platform.name === 'Firefox for iOS';
} }
/** /**
* @hidden * @hidden
*/ */
public isOperaBrowser(): boolean { public isOperaBrowser(): boolean {
return platform.name === "Opera"; return platform.name === 'Opera';
} }
/** /**
* @hidden * @hidden
*/ */
public isOperaMobileBrowser(): boolean { public isOperaMobileBrowser(): boolean {
return platform.name === "Opera Mobile"; return platform.name === 'Opera Mobile';
} }
/** /**
* @hidden * @hidden
*/ */
public isEdgeBrowser(): boolean { public isEdgeBrowser(): boolean {
const version = platform?.version ? parseFloat(platform.version) : -1; const version = platform?.version ? parseFloat(platform.version) : -1;
return platform.name === "Microsoft Edge" && version >= 80; return platform.name === 'Microsoft Edge' && version >= 80;
} }
/** /**
* @hidden * @hidden
*/ */
public isEdgeMobileBrowser(): boolean { public isEdgeMobileBrowser(): boolean {
const version = platform?.version ? parseFloat(platform.version) : -1; const version = platform?.version ? parseFloat(platform.version) : -1;
return platform.name === "Microsoft Edge" && (platform.os?.family === 'Android' || platform.os?.family === 'iOS') && version > 45; return platform.name === 'Microsoft Edge' && (platform.os?.family === 'Android' || platform.os?.family === 'iOS') && version > 45;
} }
/** /**
* @hidden * @hidden
*/ */
public isAndroidBrowser(): boolean { public isAndroidBrowser(): boolean {
return platform.name === "Android Browser"; return platform.name === 'Android Browser';
} }
/** /**
* @hidden * @hidden
*/ */
public isElectron(): boolean { public isElectron(): boolean {
return platform.name === "Electron"; return platform.name === 'Electron';
} }
/** /**
* @hidden * @hidden
*/ */
public isNodeJs(): boolean { public isNodeJs(): boolean {
return platform.name === "Node.js"; return platform.name === 'Node.js';
} }
/** /**
* @hidden * @hidden
*/ */
public isSamsungBrowser(): boolean { public isSamsungBrowser(): boolean {
return ( return platform.name === 'Samsung Internet Mobile' || platform.name === 'Samsung Internet';
platform.name === "Samsung Internet Mobile" || }
platform.name === "Samsung Internet"
);
}
/** /**
* @hidden * @hidden
*/ */
public isIPhoneOrIPad(): boolean { public isIPhoneOrIPad(): boolean {
const userAgent = !!platform.ua ? platform.ua : navigator.userAgent; const userAgent = !!platform.ua ? platform.ua : navigator.userAgent;
const isTouchable = "ontouchend" in document; const isTouchable = 'ontouchend' in document;
const isIPad = /\b(\w*Macintosh\w*)\b/.test(userAgent) && isTouchable; const isIPad = /\b(\w*Macintosh\w*)\b/.test(userAgent) && isTouchable;
const isIPhone = const isIPhone = /\b(\w*iPhone\w*)\b/.test(userAgent) && /\b(\w*Mobile\w*)\b/.test(userAgent) && isTouchable;
/\b(\w*iPhone\w*)\b/.test(userAgent) && return isIPad || isIPhone;
/\b(\w*Mobile\w*)\b/.test(userAgent) && }
isTouchable;
return isIPad || isIPhone;
}
/** /**
* @hidden * @hidden
*/ */
public isIOSWithSafari(): boolean { public isIOSWithSafari(): boolean {
const userAgent = !!platform.ua ? platform.ua : navigator.userAgent; const userAgent = !!platform.ua ? platform.ua : navigator.userAgent;
return this.isIPhoneOrIPad() && ( return (
/\b(\w*Apple\w*)\b/.test(navigator.vendor) && this.isIPhoneOrIPad() &&
/\b(\w*Safari\w*)\b/.test(userAgent) && /\b(\w*Apple\w*)\b/.test(navigator.vendor) &&
!/\b(\w*CriOS\w*)\b/.test(userAgent) && /\b(\w*Safari\w*)\b/.test(userAgent) &&
!/\b(\w*FxiOS\w*)\b/.test(userAgent) !/\b(\w*CriOS\w*)\b/.test(userAgent) &&
); !/\b(\w*FxiOS\w*)\b/.test(userAgent)
} );
}
/** /**
* @hidden * @hidden
*/ */
public isIonicIos(): boolean { public isIonicIos(): boolean {
return this.isIPhoneOrIPad() && platform.ua!!.indexOf("Safari") === -1; return this.isIPhoneOrIPad() && platform.ua!!.indexOf('Safari') === -1;
} }
/** /**
* @hidden * @hidden
*/ */
public isIonicAndroid(): boolean { public isIonicAndroid(): boolean {
return ( return platform.os!!.family === 'Android' && platform.name == 'Android Browser';
platform.os!!.family === "Android" && platform.name == "Android Browser" }
);
}
/** /**
* @hidden * @hidden
*/ */
public isMobileDevice(): boolean { public isMobileDevice(): boolean {
return platform.os!!.family === "iOS" || platform.os!!.family === "Android"; return platform.os!!.family === 'iOS' || platform.os!!.family === 'Android';
} }
/** /**
* @hidden * @hidden
*/ */
public isReactNative(): boolean { public isReactNative(): boolean {
return false; return false;
} }
/** /**
* @hidden * @hidden
*/ */
public isChromium(): boolean { public isChromium(): boolean {
return this.isChromeBrowser() || this.isChromeMobileBrowser() || return (
this.isOperaBrowser() || this.isOperaMobileBrowser() || this.isChromeBrowser() ||
this.isEdgeBrowser() || this.isEdgeMobileBrowser() || this.isChromeMobileBrowser() ||
this.isSamsungBrowser() || this.isOperaBrowser() ||
this.isIonicAndroid() || this.isIonicIos() || this.isOperaMobileBrowser() ||
this.isElectron(); this.isEdgeBrowser() ||
} this.isEdgeMobileBrowser() ||
this.isSamsungBrowser() ||
this.isIonicAndroid() ||
this.isIonicIos() ||
this.isElectron()
);
}
/** /**
* @hidden * @hidden
*/ */
public canScreenShare(): boolean { public canScreenShare(): boolean {
const version = platform?.version ? parseFloat(platform.version) : -1; const version = platform?.version ? parseFloat(platform.version) : -1;
// Reject mobile devices // Reject mobile devices
if (this.isMobileDevice()) { if (this.isMobileDevice()) {
return false; return false;
} }
return ( return (
this.isChromeBrowser() || this.isChromeBrowser() ||
this.isFirefoxBrowser() || this.isFirefoxBrowser() ||
this.isOperaBrowser() || this.isOperaBrowser() ||
this.isElectron() || this.isElectron() ||
this.isEdgeBrowser() || this.isEdgeBrowser() ||
(this.isSafariBrowser() && version >= 13) (this.isSafariBrowser() && version >= 13)
); );
} }
/** /**
* @hidden * @hidden
*/ */
public getName(): string { public getName(): string {
return platform.name || ""; return platform.name || '';
} }
/** /**
* @hidden * @hidden
*/ */
public getVersion(): string { public getVersion(): string {
return platform.version || ""; return platform.version || '';
} }
/** /**
* @hidden * @hidden
*/ */
public getFamily(): string { public getFamily(): string {
return platform.os!!.family || ""; return platform.os!!.family || '';
} }
/** /**
* @hidden * @hidden
*/ */
public getDescription(): string { public getDescription(): string {
return platform.description || ""; return platform.description || '';
} }
} }
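PlatformUtils is a singleton, so consumers always go through getInstance(). A short usage sketch of the detection helpers defined above (TypeScript; the import path is illustrative):

    import { PlatformUtils } from './OpenViduInternal/Utils/Platform'; // illustrative path

    const platform = PlatformUtils.getInstance();

    // Gate screen-sharing UI on platforms that actually support it.
    if (platform.canScreenShare()) {
        console.log(`Screen sharing available on ${platform.getName()} ${platform.getVersion()}`);
    } else if (platform.isMobileDevice()) {
        // canScreenShare() rejects mobile devices outright, as the code above shows.
        console.log('Screen sharing not offered on mobile');
    }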
@ -63,24 +63,17 @@ export class WebRtcPeer {
this.configuration = { this.configuration = {
...configuration, ...configuration,
iceServers: iceServers: !!configuration.iceServers && configuration.iceServers.length > 0 ? configuration.iceServers : freeice(),
!!configuration.iceServers && mediaStream: configuration.mediaStream !== undefined ? configuration.mediaStream : null,
configuration.iceServers.length > 0 mode: !!configuration.mode ? configuration.mode : 'sendrecv',
? configuration.iceServers id: !!configuration.id ? configuration.id : this.generateUniqueId()
: freeice(),
mediaStream:
configuration.mediaStream !== undefined
? configuration.mediaStream
: null,
mode: !!configuration.mode ? configuration.mode : "sendrecv",
id: !!configuration.id ? configuration.id : this.generateUniqueId(),
}; };
// prettier-ignore // prettier-ignore
logger.debug(`[WebRtcPeer] configuration:\n${JSON.stringify(this.configuration, null, 2)}`); logger.debug(`[WebRtcPeer] configuration:\n${JSON.stringify(this.configuration, null, 2)}`);
this.pc = new RTCPeerConnection({ iceServers: this.configuration.iceServers }); this.pc = new RTCPeerConnection({ iceServers: this.configuration.iceServers });
this.pc.addEventListener("icecandidate", (event: RTCPeerConnectionIceEvent) => { this.pc.addEventListener('icecandidate', (event: RTCPeerConnectionIceEvent) => {
if (event.candidate !== null) { if (event.candidate !== null) {
// `RTCPeerConnectionIceEvent.candidate` is supposed to be an RTCIceCandidate: // `RTCPeerConnectionIceEvent.candidate` is supposed to be an RTCIceCandidate:
// https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnectioniceevent-candidate // https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnectioniceevent-candidate
@ -140,11 +133,11 @@ export class WebRtcPeer {
const hasVideo = this.configuration.mediaConstraints.video; const hasVideo = this.configuration.mediaConstraints.video;
const options: RTCOfferOptions = { const options: RTCOfferOptions = {
offerToReceiveAudio: this.configuration.mode !== "sendonly" && hasAudio, offerToReceiveAudio: this.configuration.mode !== 'sendonly' && hasAudio,
offerToReceiveVideo: this.configuration.mode !== "sendonly" && hasVideo, offerToReceiveVideo: this.configuration.mode !== 'sendonly' && hasVideo
}; };
logger.debug("[createOfferLegacy] RTCPeerConnection.createOffer() options:", JSON.stringify(options)); logger.debug('[createOfferLegacy] RTCPeerConnection.createOffer() options:', JSON.stringify(options));
return this.pc.createOffer(options); return this.pc.createOffer(options);
} }
@ -156,18 +149,18 @@ export class WebRtcPeer {
async createOffer(): Promise<RTCSessionDescriptionInit> { async createOffer(): Promise<RTCSessionDescriptionInit> {
// TODO: Delete this conditional when all supported browsers are // TODO: Delete this conditional when all supported browsers are
// modern enough to implement the Transceiver methods. // modern enough to implement the Transceiver methods.
if (!("addTransceiver" in this.pc)) { if (!('addTransceiver' in this.pc)) {
logger.warn( logger.warn(
"[createOffer] Method RTCPeerConnection.addTransceiver() is NOT available; using LEGACY offerToReceive{Audio,Video}" '[createOffer] Method RTCPeerConnection.addTransceiver() is NOT available; using LEGACY offerToReceive{Audio,Video}'
); );
return this.createOfferLegacy(); return this.createOfferLegacy();
} else { } else {
logger.debug("[createOffer] Method RTCPeerConnection.addTransceiver() is available; using it"); logger.debug('[createOffer] Method RTCPeerConnection.addTransceiver() is available; using it');
} }
// Spec doc: https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-addtransceiver // Spec doc: https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-addtransceiver
if (this.configuration.mode !== "recvonly") { if (this.configuration.mode !== 'recvonly') {
// To send media, assume that all desired media tracks have been // To send media, assume that all desired media tracks have been
// already added by higher level code to our MediaStream. // already added by higher level code to our MediaStream.
@ -180,24 +173,18 @@ export class WebRtcPeer {
for (const track of this.configuration.mediaStream.getTracks()) { for (const track of this.configuration.mediaStream.getTracks()) {
const tcInit: RTCRtpTransceiverInit = { const tcInit: RTCRtpTransceiverInit = {
direction: this.configuration.mode, direction: this.configuration.mode,
streams: [this.configuration.mediaStream], streams: [this.configuration.mediaStream]
}; };
if (track.kind === "video" && this.configuration.simulcast) { if (track.kind === 'video' && this.configuration.simulcast) {
// Check if the requested size is enough to ask for 3 layers. // Check if the requested size is enough to ask for 3 layers.
const trackSettings = track.getSettings(); const trackSettings = track.getSettings();
const trackConsts = track.getConstraints(); const trackConsts = track.getConstraints();
const trackWidth: number = const trackWidth: number =
trackSettings.width ?? trackSettings.width ?? (trackConsts.width as ConstrainULongRange).ideal ?? (trackConsts.width as number) ?? 0;
(trackConsts.width as ConstrainULongRange).ideal ??
(trackConsts.width as number) ??
0;
const trackHeight: number = const trackHeight: number =
trackSettings.height ?? trackSettings.height ?? (trackConsts.height as ConstrainULongRange).ideal ?? (trackConsts.height as number) ?? 0;
(trackConsts.height as ConstrainULongRange).ideal ??
(trackConsts.height as number) ??
0;
logger.info(`[createOffer] Video track dimensions: ${trackWidth}x${trackHeight}`); logger.info(`[createOffer] Video track dimensions: ${trackWidth}x${trackHeight}`);
const trackPixels = trackWidth * trackHeight; const trackPixels = trackWidth * trackHeight;
@ -215,13 +202,13 @@ export class WebRtcPeer {
const layerDiv = 2 ** (maxLayers - l - 1); const layerDiv = 2 ** (maxLayers - l - 1);
const encoding: RTCRtpEncodingParameters = { const encoding: RTCRtpEncodingParameters = {
rid: "rdiv" + layerDiv.toString(), rid: 'rdiv' + layerDiv.toString(),
// @ts-ignore -- Property missing from DOM types. // @ts-ignore -- Property missing from DOM types.
scalabilityMode: "L1T1", scalabilityMode: 'L1T1'
}; };
if (["detail", "text"].includes(track.contentHint)) { if (['detail', 'text'].includes(track.contentHint)) {
// Prioritize best resolution, for maximum picture detail. // Prioritize best resolution, for maximum picture detail.
encoding.scaleResolutionDownBy = 1.0; encoding.scaleResolutionDownBy = 1.0;
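The simulcast hunk above is only re-wrapped by the formatter; the underlying idea is one RTCRtpEncodingParameters entry per layer, with the rid derived from the downscaling divisor. A self-contained sketch of that pattern (the helper name buildSimulcastEncodings is hypothetical, and the class additionally adjusts scaling based on track.contentHint as the diff shows):

    // Hypothetical helper mirroring the per-layer encoding construction above.
    function buildSimulcastEncodings(maxLayers: number): RTCRtpEncodingParameters[] {
        const encodings: RTCRtpEncodingParameters[] = [];
        for (let l = 0; l < maxLayers; l++) {
            const layerDiv = 2 ** (maxLayers - l - 1); // 4, 2, 1 for three layers
            encodings.push({
                rid: 'rdiv' + layerDiv.toString(),
                scaleResolutionDownBy: layerDiv
            });
        }
        return encodings;
    }

    // Usage sketch: pc.addTransceiver(track, { direction: 'sendonly', streams: [stream], sendEncodings: buildSimulcastEncodings(3) });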
@ -237,22 +224,20 @@ export class WebRtcPeer {
const tc = this.pc.addTransceiver(track, tcInit); const tc = this.pc.addTransceiver(track, tcInit);
if (track.kind === "video") { if (track.kind === 'video') {
let sendParams = tc.sender.getParameters(); let sendParams = tc.sender.getParameters();
let needSetParams = false; let needSetParams = false;
if (!sendParams.degradationPreference?.length) { if (!sendParams.degradationPreference?.length) {
// degradationPreference for video: "balanced", "maintain-framerate", "maintain-resolution". // degradationPreference for video: "balanced", "maintain-framerate", "maintain-resolution".
// https://www.w3.org/TR/2018/CR-webrtc-20180927/#dom-rtcdegradationpreference // https://www.w3.org/TR/2018/CR-webrtc-20180927/#dom-rtcdegradationpreference
if (["detail", "text"].includes(track.contentHint)) { if (['detail', 'text'].includes(track.contentHint)) {
sendParams.degradationPreference = "maintain-resolution"; sendParams.degradationPreference = 'maintain-resolution';
} else { } else {
sendParams.degradationPreference = "balanced"; sendParams.degradationPreference = 'balanced';
} }
logger.info( logger.info(`[createOffer] Video sender Degradation Preference set: ${sendParams.degradationPreference}`);
`[createOffer] Video sender Degradation Preference set: ${sendParams.degradationPreference}`
);
// FIXME: Firefox implements degradationPreference on each individual encoding! // FIXME: Firefox implements degradationPreference on each individual encoding!
// (set it on every element of the sendParams.encodings array) // (set it on every element of the sendParams.encodings array)
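The degradation-preference block is likewise only reflowed; the pattern it implements is a read-modify-write on the video sender's parameters. A hedged sketch (the function name and its arguments are assumptions; typings for degradationPreference may vary between TypeScript DOM lib versions):

    // Read the current sender parameters, fill in degradationPreference if unset, write them back.
    async function applyDegradationPreference(sender: RTCRtpSender, track: MediaStreamTrack): Promise<void> {
        const params: any = sender.getParameters();
        if (!params.degradationPreference) {
            // 'maintain-resolution' keeps detail for screen/text content; 'balanced' otherwise.
            params.degradationPreference = ['detail', 'text'].includes(track.contentHint)
                ? 'maintain-resolution'
                : 'balanced';
            await sender.setParameters(params);
        }
    }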
@ -310,7 +295,7 @@ export class WebRtcPeer {
} }
} else { } else {
// To just receive media, create new recvonly transceivers. // To just receive media, create new recvonly transceivers.
for (const kind of ["audio", "video"]) { for (const kind of ['audio', 'video']) {
// Check if the media kind should be used. // Check if the media kind should be used.
if (!this.configuration.mediaConstraints[kind]) { if (!this.configuration.mediaConstraints[kind]) {
continue; continue;
@ -319,7 +304,7 @@ export class WebRtcPeer {
this.configuration.mediaStream = new MediaStream(); this.configuration.mediaStream = new MediaStream();
this.pc.addTransceiver(kind, { this.pc.addTransceiver(kind, {
direction: this.configuration.mode, direction: this.configuration.mode,
streams: [this.configuration.mediaStream], streams: [this.configuration.mediaStream]
}); });
} }
} }
@ -352,23 +337,21 @@ export class WebRtcPeer {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
// TODO: Delete this conditional when all supported browsers are // TODO: Delete this conditional when all supported browsers are
// modern enough to implement the Transceiver methods. // modern enough to implement the Transceiver methods.
if ("getTransceivers" in this.pc) { if ('getTransceivers' in this.pc) {
logger.debug("[createAnswer] Method RTCPeerConnection.getTransceivers() is available; using it"); logger.debug('[createAnswer] Method RTCPeerConnection.getTransceivers() is available; using it');
// Ensure that the PeerConnection already contains one Transceiver // Ensure that the PeerConnection already contains one Transceiver
// for each kind of media. // for each kind of media.
// The Transceivers should have been already created internally by // The Transceivers should have been already created internally by
// the PC itself, when `pc.setRemoteDescription(sdpOffer)` was called. // the PC itself, when `pc.setRemoteDescription(sdpOffer)` was called.
for (const kind of ["audio", "video"]) { for (const kind of ['audio', 'video']) {
// Check if the media kind should be used. // Check if the media kind should be used.
if (!this.configuration.mediaConstraints[kind]) { if (!this.configuration.mediaConstraints[kind]) {
continue; continue;
} }
let tc = this.pc let tc = this.pc.getTransceivers().find((tc) => tc.receiver.track.kind === kind);
.getTransceivers()
.find((tc) => tc.receiver.track.kind === kind);
if (tc) { if (tc) {
// Enforce our desired direction. // Enforce our desired direction.
@ -382,27 +365,25 @@ export class WebRtcPeer {
.createAnswer() .createAnswer()
.then((sdpAnswer) => resolve(sdpAnswer)) .then((sdpAnswer) => resolve(sdpAnswer))
.catch((error) => reject(error)); .catch((error) => reject(error));
} else { } else {
// TODO: Delete else branch when all supported browsers are // TODO: Delete else branch when all supported browsers are
// modern enough to implement the Transceiver methods // modern enough to implement the Transceiver methods
let offerAudio, offerVideo = true; let offerAudio,
offerVideo = true;
if (!!this.configuration.mediaConstraints) { if (!!this.configuration.mediaConstraints) {
offerAudio = (typeof this.configuration.mediaConstraints.audio === 'boolean') ? offerAudio =
this.configuration.mediaConstraints.audio : true; typeof this.configuration.mediaConstraints.audio === 'boolean' ? this.configuration.mediaConstraints.audio : true;
offerVideo = (typeof this.configuration.mediaConstraints.video === 'boolean') ? offerVideo =
this.configuration.mediaConstraints.video : true; typeof this.configuration.mediaConstraints.video === 'boolean' ? this.configuration.mediaConstraints.video : true;
const constraints: RTCOfferOptions = { const constraints: RTCOfferOptions = {
offerToReceiveAudio: offerAudio, offerToReceiveAudio: offerAudio,
offerToReceiveVideo: offerVideo offerToReceiveVideo: offerVideo
}; };
this.pc!.createAnswer(constraints) this.pc!.createAnswer(constraints)
.then(sdpAnswer => resolve(sdpAnswer)) .then((sdpAnswer) => resolve(sdpAnswer))
.catch(error => reject(error)); .catch((error) => reject(error));
} }
} }
// else, there is nothing to do; the legacy createAnswer() options do // else, there is nothing to do; the legacy createAnswer() options do
@ -415,7 +396,8 @@ export class WebRtcPeer {
*/ */
processLocalOffer(offer: RTCSessionDescriptionInit): Promise<void> { processLocalOffer(offer: RTCSessionDescriptionInit): Promise<void> {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
this.pc.setLocalDescription(offer) this.pc
.setLocalDescription(offer)
.then(() => { .then(() => {
const localDescription = this.pc.localDescription; const localDescription = this.pc.localDescription;
if (!!localDescription) { if (!!localDescription) {
@ -425,7 +407,7 @@ export class WebRtcPeer {
return reject('Local description is not defined'); return reject('Local description is not defined');
} }
}) })
.catch(error => reject(error)); .catch((error) => reject(error));
}); });
} }
@ -445,7 +427,7 @@ export class WebRtcPeer {
} }
this.setRemoteDescription(offer) this.setRemoteDescription(offer)
.then(() => resolve()) .then(() => resolve())
.catch(error => reject(error)); .catch((error) => reject(error));
}); });
} }
@ -458,9 +440,10 @@ export class WebRtcPeer {
if (this.pc.signalingState === 'closed') { if (this.pc.signalingState === 'closed') {
return reject('RTCPeerConnection is closed when trying to set local description'); return reject('RTCPeerConnection is closed when trying to set local description');
} }
this.pc.setLocalDescription(answer) this.pc
.setLocalDescription(answer)
.then(() => resolve()) .then(() => resolve())
.catch(error => reject(error)); .catch((error) => reject(error));
}); });
} }
@ -513,7 +496,10 @@ export class WebRtcPeer {
break; break;
case 'stable': case 'stable':
if (!!this.pc.remoteDescription) { if (!!this.pc.remoteDescription) {
this.pc.addIceCandidate(iceCandidate).then(() => resolve()).catch(error => reject(error)); this.pc
.addIceCandidate(iceCandidate)
.then(() => resolve())
.catch((error) => reject(error));
} else { } else {
this.iceCandidateList.push(iceCandidate); this.iceCandidateList.push(iceCandidate);
resolve(); resolve();
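The 'stable' branch above follows a queue-then-drain pattern: candidates arriving before the remote description is set are buffered in iceCandidateList and applied later. A minimal standalone sketch of the same idea (the class and field names here are illustrative, not the library's API):

    // Illustrative buffer for ICE candidates that arrive before the remote description is known.
    class CandidateBuffer {
        private pending: RTCIceCandidate[] = [];

        constructor(private pc: RTCPeerConnection) {}

        async add(candidate: RTCIceCandidate): Promise<void> {
            if (this.pc.signalingState === 'stable' && this.pc.remoteDescription) {
                await this.pc.addIceCandidate(candidate);
            } else {
                this.pending.push(candidate);
            }
        }

        // Call after setRemoteDescription() resolves to flush buffered candidates.
        async drain(): Promise<void> {
            while (this.pending.length > 0) {
                await this.pc.addIceCandidate(this.pending.shift()!);
            }
        }
    }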
@ -532,7 +518,12 @@ export class WebRtcPeer {
switch (iceConnectionState) { switch (iceConnectionState) {
case 'disconnected': case 'disconnected':
// Possible network disconnection // Possible network disconnection
const msg1 = 'IceConnectionState of RTCPeerConnection ' + this.configuration.id + ' (' + otherId + ') change to "disconnected". Possible network disconnection'; const msg1 =
'IceConnectionState of RTCPeerConnection ' +
this.configuration.id +
' (' +
otherId +
') change to "disconnected". Possible network disconnection';
logger.warn(msg1); logger.warn(msg1);
this.configuration.onIceConnectionStateException(ExceptionEventName.ICE_CONNECTION_DISCONNECTED, msg1); this.configuration.onIceConnectionStateException(ExceptionEventName.ICE_CONNECTION_DISCONNECTED, msg1);
break; break;
@ -542,19 +533,27 @@ export class WebRtcPeer {
this.configuration.onIceConnectionStateException(ExceptionEventName.ICE_CONNECTION_FAILED, msg2); this.configuration.onIceConnectionStateException(ExceptionEventName.ICE_CONNECTION_FAILED, msg2);
break; break;
case 'closed': case 'closed':
logger.log('IceConnectionState of RTCPeerConnection ' + this.configuration.id + ' (' + otherId + ') change to "closed"'); logger.log(
'IceConnectionState of RTCPeerConnection ' + this.configuration.id + ' (' + otherId + ') change to "closed"'
);
break; break;
case 'new': case 'new':
logger.log('IceConnectionState of RTCPeerConnection ' + this.configuration.id + ' (' + otherId + ') change to "new"'); logger.log('IceConnectionState of RTCPeerConnection ' + this.configuration.id + ' (' + otherId + ') change to "new"');
break; break;
case 'checking': case 'checking':
logger.log('IceConnectionState of RTCPeerConnection ' + this.configuration.id + ' (' + otherId + ') change to "checking"'); logger.log(
'IceConnectionState of RTCPeerConnection ' + this.configuration.id + ' (' + otherId + ') change to "checking"'
);
break; break;
case 'connected': case 'connected':
logger.log('IceConnectionState of RTCPeerConnection ' + this.configuration.id + ' (' + otherId + ') change to "connected"'); logger.log(
'IceConnectionState of RTCPeerConnection ' + this.configuration.id + ' (' + otherId + ') change to "connected"'
);
break; break;
case 'completed': case 'completed':
logger.log('IceConnectionState of RTCPeerConnection ' + this.configuration.id + ' (' + otherId + ') change to "completed"'); logger.log(
'IceConnectionState of RTCPeerConnection ' + this.configuration.id + ' (' + otherId + ') change to "completed"'
);
break; break;
} }
}); });
@ -566,10 +565,8 @@ export class WebRtcPeer {
generateUniqueId(): string { generateUniqueId(): string {
return uuidv4(); return uuidv4();
} }
} }
export class WebRtcPeerRecvonly extends WebRtcPeer { export class WebRtcPeerRecvonly extends WebRtcPeer {
constructor(configuration: WebRtcPeerConfiguration) { constructor(configuration: WebRtcPeerConfiguration) {
configuration.mode = 'recvonly'; configuration.mode = 'recvonly';
@ -30,18 +30,18 @@ const logger: OpenViduLogger = OpenViduLogger.getInstance();
let platform: PlatformUtils; let platform: PlatformUtils;
interface WebrtcStatsConfig { interface WebrtcStatsConfig {
interval: number, interval: number;
httpEndpoint: string httpEndpoint: string;
} }
interface JSONStatsResponse { interface JSONStatsResponse {
'@timestamp': string, '@timestamp': string;
participant_id: string, participant_id: string;
session_id: string, session_id: string;
platform: string, platform: string;
platform_description: string, platform_description: string;
stream: string, stream: string;
webrtc_stats: IWebrtcStats webrtc_stats: IWebrtcStats;
} }
/** /**
@ -49,55 +49,62 @@ interface JSONStatsResponse {
*/ */
interface IWebrtcStats { interface IWebrtcStats {
inbound?: { inbound?: {
audio: { audio:
bytesReceived: number, | {
packetsReceived: number, bytesReceived: number;
packetsLost: number, packetsReceived: number;
jitter: number packetsLost: number;
} | {}, jitter: number;
video: { }
bytesReceived: number, | {};
packetsReceived: number, video:
packetsLost: number, | {
jitter?: number, // Firefox bytesReceived: number;
jitterBufferDelay?: number, // Chrome packetsReceived: number;
framesDecoded: number, packetsLost: number;
firCount: number, jitter?: number; // Firefox
nackCount: number, jitterBufferDelay?: number; // Chrome
pliCount: number, framesDecoded: number;
frameHeight?: number, // Chrome firCount: number;
frameWidth?: number, // Chrome nackCount: number;
framesDropped?: number, // Chrome pliCount: number;
framesReceived?: number // Chrome frameHeight?: number; // Chrome
} | {} frameWidth?: number; // Chrome
}, framesDropped?: number; // Chrome
framesReceived?: number; // Chrome
}
| {};
};
outbound?: { outbound?: {
audio: { audio:
bytesSent: number, | {
packetsSent: number, bytesSent: number;
} | {}, packetsSent: number;
video: { }
bytesSent: number, | {};
packetsSent: number, video:
firCount: number, | {
framesEncoded: number, bytesSent: number;
nackCount: number, packetsSent: number;
pliCount: number, firCount: number;
qpSum: number, framesEncoded: number;
frameHeight?: number, // Chrome nackCount: number;
frameWidth?: number, // Chrome pliCount: number;
framesSent?: number // Chrome qpSum: number;
} | {} frameHeight?: number; // Chrome
}, frameWidth?: number; // Chrome
framesSent?: number; // Chrome
}
| {};
};
candidatepair?: { candidatepair?: {
currentRoundTripTime?: number // Chrome currentRoundTripTime?: number; // Chrome
availableOutgoingBitrate?: number //Chrome availableOutgoingBitrate?: number; //Chrome
// availableIncomingBitrate?: number // No support for any browsers (https://developer.mozilla.org/en-US/docs/Web/API/RTCIceCandidatePairStats/availableIncomingBitrate) // availableIncomingBitrate?: number // No support for any browsers (https://developer.mozilla.org/en-US/docs/Web/API/RTCIceCandidatePairStats/availableIncomingBitrate)
} };
}; }
export class WebRtcStats { export class WebRtcStats {
private readonly STATS_ITEM_NAME = 'webrtc-stats-config'; private readonly STATS_ITEM_NAME = 'webrtc-stats-config';
private webRtcStatsEnabled = false; private webRtcStatsEnabled = false;
@ -114,23 +121,23 @@ export class WebRtcStats {
} }
public initWebRtcStats(): void { public initWebRtcStats(): void {
const webrtcObj = localStorage.getItem(this.STATS_ITEM_NAME); const webrtcObj = localStorage.getItem(this.STATS_ITEM_NAME);
if (!!webrtcObj) { if (!!webrtcObj) {
this.webRtcStatsEnabled = true; this.webRtcStatsEnabled = true;
const webrtcStatsConfig: WebrtcStatsConfig = JSON.parse(webrtcObj); const webrtcStatsConfig: WebrtcStatsConfig = JSON.parse(webrtcObj);
// webrtc object found in local storage // webrtc object found in local storage
logger.warn('WebRtc stats enabled for stream ' + this.stream.streamId + ' of connection ' + this.stream.connection.connectionId); logger.warn(
'WebRtc stats enabled for stream ' + this.stream.streamId + ' of connection ' + this.stream.connection.connectionId
);
logger.warn('localStorage item: ' + JSON.stringify(webrtcStatsConfig)); logger.warn('localStorage item: ' + JSON.stringify(webrtcStatsConfig));
this.POST_URL = webrtcStatsConfig.httpEndpoint; this.POST_URL = webrtcStatsConfig.httpEndpoint;
this.statsInterval = webrtcStatsConfig.interval; // Interval in seconds this.statsInterval = webrtcStatsConfig.interval; // Interval in seconds
this.webRtcStatsIntervalId = setInterval(async () => { this.webRtcStatsIntervalId = setInterval(async () => {
await this.sendStatsToHttpEndpoint(); await this.sendStatsToHttpEndpoint();
}, this.statsInterval * 1000); }, this.statsInterval * 1000);
} else { } else {
logger.debug('WebRtc stats not enabled'); logger.debug('WebRtc stats not enabled');
} }
@ -206,7 +213,6 @@ export class WebRtcStats {
// - ¿React Native? // - ¿React Native?
public getSelectedIceCandidateInfo(): Promise<any> { public getSelectedIceCandidateInfo(): Promise<any> {
return new Promise(async (resolve, reject) => { return new Promise(async (resolve, reject) => {
const statsReport: any = await this.stream.getRTCPeerConnection().getStats(); const statsReport: any = await this.stream.getRTCPeerConnection().getStats();
let transportStat; let transportStat;
const candidatePairs: Map<string, any> = new Map(); const candidatePairs: Map<string, any> = new Map();
@ -230,7 +236,7 @@ export class WebRtcStats {
}); });
let selectedCandidatePair; let selectedCandidatePair;
if (transportStat != null) { if (transportStat != null) {
const selectedCandidatePairId = transportStat.selectedCandidatePairId const selectedCandidatePairId = transportStat.selectedCandidatePairId;
selectedCandidatePair = candidatePairs.get(selectedCandidatePairId); selectedCandidatePair = candidatePairs.get(selectedCandidatePairId);
} else { } else {
// This is basically Firefox // This is basically Firefox
@ -250,9 +256,11 @@ export class WebRtcStats {
if (!!finalLocalCandidate) { if (!!finalLocalCandidate) {
const candList = this.stream.getLocalIceCandidateList(); const candList = this.stream.getLocalIceCandidateList();
const cand = candList.filter((c: RTCIceCandidate) => { const cand = candList.filter((c: RTCIceCandidate) => {
return (!!c.candidate && return (
!!c.candidate &&
(c.candidate.indexOf(finalLocalCandidate.ip) >= 0 || c.candidate.indexOf(finalLocalCandidate.address) >= 0) && (c.candidate.indexOf(finalLocalCandidate.ip) >= 0 || c.candidate.indexOf(finalLocalCandidate.address) >= 0) &&
c.candidate.indexOf(finalLocalCandidate.port) >= 0); c.candidate.indexOf(finalLocalCandidate.port) >= 0
);
}); });
finalLocalCandidate.raw = []; finalLocalCandidate.raw = [];
for (let c of cand) { for (let c of cand) {
@ -266,9 +274,11 @@ export class WebRtcStats {
if (!!finalRemoteCandidate) { if (!!finalRemoteCandidate) {
const candList = this.stream.getRemoteIceCandidateList(); const candList = this.stream.getRemoteIceCandidateList();
const cand = candList.filter((c: RTCIceCandidate) => { const cand = candList.filter((c: RTCIceCandidate) => {
return (!!c.candidate && return (
!!c.candidate &&
(c.candidate.indexOf(finalRemoteCandidate.ip) >= 0 || c.candidate.indexOf(finalRemoteCandidate.address) >= 0) && (c.candidate.indexOf(finalRemoteCandidate.ip) >= 0 || c.candidate.indexOf(finalRemoteCandidate.address) >= 0) &&
c.candidate.indexOf(finalRemoteCandidate.port) >= 0); c.candidate.indexOf(finalRemoteCandidate.port) >= 0
);
}); });
finalRemoteCandidate.raw = []; finalRemoteCandidate.raw = [];
for (let c of cand) { for (let c of cand) {
@ -288,7 +298,9 @@ export class WebRtcStats {
public stopWebRtcStats() { public stopWebRtcStats() {
if (this.webRtcStatsEnabled) { if (this.webRtcStatsEnabled) {
clearInterval(this.webRtcStatsIntervalId); clearInterval(this.webRtcStatsIntervalId);
logger.warn('WebRtc stats stopped for disposed stream ' + this.stream.streamId + ' of connection ' + this.stream.connection.connectionId); logger.warn(
'WebRtc stats stopped for disposed stream ' + this.stream.streamId + ' of connection ' + this.stream.connection.connectionId
);
} }
} }
@ -299,10 +311,9 @@ export class WebRtcStats {
'Content-type': 'application/json' 'Content-type': 'application/json'
}, },
body: JSON.stringify(response), body: JSON.stringify(response),
method: 'POST', method: 'POST'
}; };
await fetch(url, configuration); await fetch(url, configuration);
} catch (error) { } catch (error) {
logger.error(`sendStats error: ${JSON.stringify(error)}`); logger.error(`sendStats error: ${JSON.stringify(error)}`);
} }
@ -350,9 +361,7 @@ export class WebRtcStats {
// - ¿Ionic? // - ¿Ionic?
// - ¿React Native? // - ¿React Native?
public async getCommonStats(): Promise<IWebrtcStats> { public async getCommonStats(): Promise<IWebrtcStats> {
return new Promise(async (resolve, reject) => { return new Promise(async (resolve, reject) => {
try { try {
const statsReport: any = await this.stream.getRTCPeerConnection().getStats(); const statsReport: any = await this.stream.getRTCPeerConnection().getStats();
const response: IWebrtcStats = this.getWebRtcStatsResponseOutline(); const response: IWebrtcStats = this.getWebRtcStatsResponseOutline();
@ -360,24 +369,23 @@ export class WebRtcStats {
const candidatePairStats = ['availableOutgoingBitrate', 'currentRoundTripTime']; const candidatePairStats = ['availableOutgoingBitrate', 'currentRoundTripTime'];
statsReport.forEach((stat: any) => { statsReport.forEach((stat: any) => {
let mediaType = stat.mediaType != null ? stat.mediaType : stat.kind; let mediaType = stat.mediaType != null ? stat.mediaType : stat.kind;
const addStat = (direction: string, key: string): void => { const addStat = (direction: string, key: string): void => {
if (stat[key] != null && response[direction] != null) { if (stat[key] != null && response[direction] != null) {
if (!mediaType && (videoTrackStats.indexOf(key) > -1)) { if (!mediaType && videoTrackStats.indexOf(key) > -1) {
mediaType = 'video'; mediaType = 'video';
} }
if (direction != null && mediaType != null && key != null && response[direction][mediaType] != null) { if (direction != null && mediaType != null && key != null && response[direction][mediaType] != null) {
response[direction][mediaType][key] = Number(stat[key]); response[direction][mediaType][key] = Number(stat[key]);
} else if(direction != null && key != null && candidatePairStats.includes(key)) { } else if (direction != null && key != null && candidatePairStats.includes(key)) {
// candidate-pair-stats // candidate-pair-stats
response[direction][key] = Number(stat[key]); response[direction][key] = Number(stat[key]);
} }
} }
} };
switch (stat.type) { switch (stat.type) {
case "outbound-rtp": case 'outbound-rtp':
addStat('outbound', 'bytesSent'); addStat('outbound', 'bytesSent');
addStat('outbound', 'packetsSent'); addStat('outbound', 'packetsSent');
addStat('outbound', 'framesEncoded'); addStat('outbound', 'framesEncoded');
@ -386,7 +394,7 @@ export class WebRtcStats {
addStat('outbound', 'pliCount'); addStat('outbound', 'pliCount');
addStat('outbound', 'qpSum'); addStat('outbound', 'qpSum');
break; break;
case "inbound-rtp": case 'inbound-rtp':
addStat('inbound', 'bytesReceived'); addStat('inbound', 'bytesReceived');
addStat('inbound', 'packetsReceived'); addStat('inbound', 'packetsReceived');
addStat('inbound', 'packetsLost'); addStat('inbound', 'packetsLost');
@ -412,7 +420,7 @@ export class WebRtcStats {
}); });
// Delete candidatepair from response if null // Delete candidatepair from response if null
if(!response?.candidatepair || Object.keys(<Object>response.candidatepair).length === 0){ if (!response?.candidatepair || Object.keys(<Object>response.candidatepair).length === 0) {
delete response.candidatepair; delete response.candidatepair;
} }
@ -421,7 +429,6 @@ export class WebRtcStats {
logger.error('Error getting common stats: ', error); logger.error('Error getting common stats: ', error);
return reject(error); return reject(error);
} }
}); });
} }
@ -455,5 +462,4 @@ export class WebRtcStats {
}; };
} }
} }
}
}
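As initWebRtcStats above shows, stats collection is opt-in: it activates only when a localStorage item named 'webrtc-stats-config' is present, carrying a polling interval in seconds and an HTTP endpoint to POST the JSON reports to. A sketch of enabling it from the browser console (the endpoint URL is a placeholder):

    // Must be set before the Stream is initialized; values are read in initWebRtcStats().
    const statsConfig = {
        interval: 10, // seconds between POSTs
        httpEndpoint: 'https://example.com/openvidu/webrtc-stats' // placeholder endpoint
    };
    localStorage.setItem('webrtc-stats-config', JSON.stringify(statsConfig));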
@ -45,4 +45,4 @@ export { StreamManagerEventMap } from './OpenViduInternal/Events/EventMap/Stream
export { PublisherEventMap } from './OpenViduInternal/Events/EventMap/PublisherEventMap'; export { PublisherEventMap } from './OpenViduInternal/Events/EventMap/PublisherEventMap';
// Disable jsnlog when library is loaded // Disable jsnlog when library is loaded
JL.setOptions({ enabled: false }) JL.setOptions({ enabled: false });
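jsnlog is enabled by default, so the library turns it off as soon as it loads; the same call can switch it back on while debugging (a hedged example using the same JL.setOptions call as above):

    import { JL } from 'jsnlog';

    // Re-enable jsnlog output, e.g. while debugging openvidu-browser locally.
    JL.setOptions({ enabled: true });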