openvidu-browser: Added common format config file

pull/750/head
csantosm 2022-08-17 18:04:05 +02:00
parent 128dd3cfed
commit 2ce54f577b
66 changed files with 3711 additions and 3328 deletions

View File

@ -0,0 +1,10 @@
{
"singleQuote": true,
"printWidth": 140,
"trailingComma": "none",
"semi": true,
"bracketSpacing": true,
"useTabs": false,
"jsxSingleQuote": true,
"tabWidth": 4
}

View File

@ -6,4 +6,4 @@ if (typeof globalThis !== 'undefined') {
}
// Disable jsnlog when library is loaded
JL.setOptions({ enabled: false })
JL.setOptions({ enabled: false });

View File

@ -29,13 +29,11 @@ import { ExceptionEvent, ExceptionEventName } from '../OpenViduInternal/Events/E
*/
const logger: OpenViduLogger = OpenViduLogger.getInstance();
/**
* Represents each one of the user's connection to the session (the local one and other user's connections).
* Therefore each [[Session]] and [[Stream]] object has an attribute of type Connection
*/
export class Connection {
/**
* Unique identifier of the connection
*/
@ -125,38 +123,46 @@ export class Connection {
logger.info(msg);
}
/* Hidden methods */
/**
* @hidden
*/
sendIceCandidate(candidate: RTCIceCandidate): void {
logger.debug((!!this.stream!.outboundStreamOpts ? 'Local' : 'Remote') + 'candidate for' + this.connectionId, candidate);
logger.debug((!!this.stream!.outboundStreamOpts ? 'Local' : 'Remote') + 'candidate for' +
this.connectionId, candidate);
this.session.openvidu.sendRequest('onIceCandidate', {
endpointName: this.connectionId,
candidate: candidate.candidate,
sdpMid: candidate.sdpMid,
sdpMLineIndex: candidate.sdpMLineIndex
}, (error, response) => {
if (error) {
logger.error('Error sending ICE candidate: ' + JSON.stringify(error));
this.session.emitEvent('exception', [new ExceptionEvent(this.session, ExceptionEventName.ICE_CANDIDATE_ERROR, this.session, "There was an unexpected error on the server-side processing an ICE candidate generated and sent by the client-side", error)]);
this.session.openvidu.sendRequest(
'onIceCandidate',
{
endpointName: this.connectionId,
candidate: candidate.candidate,
sdpMid: candidate.sdpMid,
sdpMLineIndex: candidate.sdpMLineIndex
},
(error, response) => {
if (error) {
logger.error('Error sending ICE candidate: ' + JSON.stringify(error));
this.session.emitEvent('exception', [
new ExceptionEvent(
this.session,
ExceptionEventName.ICE_CANDIDATE_ERROR,
this.session,
'There was an unexpected error on the server-side processing an ICE candidate generated and sent by the client-side',
error
)
]);
}
}
});
);
}
/**
* @hidden
*/
initRemoteStreams(options: StreamOptionsServer[]): void {
// This is ready for supporting multiple streams per Connection object. Right now the loop will always run just once
// this.stream should also be replaced by a collection of streams to support multiple streams per Connection
options.forEach(opts => {
options.forEach((opts) => {
const streamOptions: InboundStreamOptions = {
id: opts.id,
createdAt: opts.createdAt,
@ -175,7 +181,10 @@ export class Connection {
this.addStream(stream);
});
logger.info("Remote 'Connection' with 'connectionId' [" + this.connectionId + '] is now configured for receiving Streams with options: ', this.stream!.inboundStreamOpts);
logger.info(
"Remote 'Connection' with 'connectionId' [" + this.connectionId + '] is now configured for receiving Streams with options: ',
this.stream!.inboundStreamOpts
);
}
/**
@ -202,5 +211,4 @@ export class Connection {
}
this.disposed = true;
}
}

View File

@ -27,7 +27,6 @@ import { OpenViduLogger } from '../OpenViduInternal/Logger/OpenViduLogger';
const logger: OpenViduLogger = OpenViduLogger.getInstance();
export abstract class EventDispatcher {
/**
* @hidden
*/
@ -42,27 +41,27 @@ export abstract class EventDispatcher {
*
* @returns The EventDispatcher object
*/
abstract on<K extends keyof (EventMap)>(type: K, handler: (event: (EventMap)[K]) => void): this;
abstract on<K extends keyof EventMap>(type: K, handler: (event: EventMap[K]) => void): this;
/**
* Adds function `handler` to handle event `type` just once. The handler will be automatically removed after first execution
*
* @returns The object that dispatched the event
*/
abstract once<K extends keyof (EventMap)>(type: K, handler: (event: (EventMap)[K]) => void): this;
abstract once<K extends keyof EventMap>(type: K, handler: (event: EventMap[K]) => void): this;
/**
* Removes a `handler` from event `type`. If no handler is provided, all handlers will be removed from the event
*
* @returns The object that dispatched the event
*/
abstract off<K extends keyof (EventMap)>(type: K, handler?: (event: (EventMap)[K]) => void): this;
abstract off<K extends keyof EventMap>(type: K, handler?: (event: EventMap[K]) => void): this;
/**
* @hidden
*/
onAux(type: string, message: string, handler: (event: Event) => void): EventDispatcher {
const arrowHandler = event => {
const arrowHandler = (event) => {
if (event) {
logger.info(message, event);
} else {
@ -79,7 +78,7 @@ export abstract class EventDispatcher {
* @hidden
*/
onceAux(type: string, message: string, handler: (event: Event) => void): EventDispatcher {
const arrowHandler = event => {
const arrowHandler = (event) => {
if (event) {
logger.info(message, event);
} else {
@ -110,5 +109,4 @@ export abstract class EventDispatcher {
}
return this;
}
}
}

View File

@ -32,7 +32,6 @@ const logger: OpenViduLogger = OpenViduLogger.getInstance();
* Video/audio filter applied to a Stream. See [[Stream.applyFilter]]
*/
export class Filter {
/**
* Type of filter applied. This is the name of the remote class identifying the filter to apply in Kurento Media Server.
* For example: `"FaceOverlayFilter"`, `"GStreamerFilter"`.
@ -59,7 +58,8 @@ export class Filter {
* You can use this value to know the current status of any applied filter
*/
lastExecMethod?: {
method: string, params: Object
method: string;
params: Object;
};
/**
@ -73,7 +73,6 @@ export class Filter {
stream: Stream;
private logger: OpenViduLogger;
/**
* @hidden
*/
@ -82,7 +81,6 @@ export class Filter {
this.options = options;
}
/**
* Executes a filter method. Available methods are specific for each filter
*
@ -91,24 +89,40 @@ export class Filter {
*/
execMethod(method: string, params: Object): Promise<void> {
return new Promise((resolve, reject) => {
logger.info('Executing filter method to stream ' + this.stream.streamId);
let finalParams;
const successExecMethod = triggerEvent => {
const successExecMethod = (triggerEvent) => {
logger.info('Filter method successfully executed on Stream ' + this.stream.streamId);
const oldValue = (<any>Object).assign({}, this.stream.filter);
this.stream.filter!.lastExecMethod = { method, params: finalParams };
if (triggerEvent) {
this.stream.session.emitEvent('streamPropertyChanged', [new StreamPropertyChangedEvent(this.stream.session, this.stream, 'filter', this.stream.filter!, oldValue, 'execFilterMethod')]);
this.stream.streamManager.emitEvent('streamPropertyChanged', [new StreamPropertyChangedEvent(this.stream.streamManager, this.stream, 'filter', this.stream.filter!, oldValue, 'execFilterMethod')]);
this.stream.session.emitEvent('streamPropertyChanged', [
new StreamPropertyChangedEvent(
this.stream.session,
this.stream,
'filter',
this.stream.filter!,
oldValue,
'execFilterMethod'
)
]);
this.stream.streamManager.emitEvent('streamPropertyChanged', [
new StreamPropertyChangedEvent(
this.stream.streamManager,
this.stream,
'filter',
this.stream.filter!,
oldValue,
'execFilterMethod'
)
]);
}
return resolve();
}
};
if (this.type.startsWith('VB:')) {
if (typeof params === 'string') {
try {
params = JSON.parse(params);
@ -121,23 +135,31 @@ export class Filter {
if (method === 'update') {
if (!this.stream.virtualBackgroundSinkElements?.VB) {
return reject(new OpenViduError(OpenViduErrorName.VIRTUAL_BACKGROUND_ERROR, 'There is no Virtual Background filter applied'));
return reject(
new OpenViduError(OpenViduErrorName.VIRTUAL_BACKGROUND_ERROR, 'There is no Virtual Background filter applied')
);
} else {
this.stream.virtualBackgroundSinkElements.VB.updateValues(params)
.then(() => successExecMethod(false))
.catch(error => {
.catch((error) => {
if (error.name === OpenViduErrorName.VIRTUAL_BACKGROUND_ERROR) {
return reject(new OpenViduError(error.name, error.message));
} else {
return reject(new OpenViduError(OpenViduErrorName.VIRTUAL_BACKGROUND_ERROR, 'Error updating values on Virtual Background filter: ' + error));
return reject(
new OpenViduError(
OpenViduErrorName.VIRTUAL_BACKGROUND_ERROR,
'Error updating values on Virtual Background filter: ' + error
)
);
}
});
}
} else {
return reject(new OpenViduError(OpenViduErrorName.VIRTUAL_BACKGROUND_ERROR, `Unknown Virtual Background method "${method}"`));
return reject(
new OpenViduError(OpenViduErrorName.VIRTUAL_BACKGROUND_ERROR, `Unknown Virtual Background method "${method}"`)
);
}
} else {
let stringParams;
if (typeof params !== 'string') {
try {
@ -160,7 +182,12 @@ export class Filter {
if (error) {
logger.error('Error executing filter method for Stream ' + this.stream.streamId, error);
if (error.code === 401) {
return reject(new OpenViduError(OpenViduErrorName.OPENVIDU_PERMISSION_DENIED, "You don't have permissions to execute a filter method"));
return reject(
new OpenViduError(
OpenViduErrorName.OPENVIDU_PERMISSION_DENIED,
"You don't have permissions to execute a filter method"
)
);
} else {
return reject(error);
}
@ -173,7 +200,6 @@ export class Filter {
});
}
/**
* Subscribe to certain filter event. Available events are specific for each filter
*
@ -190,15 +216,25 @@ export class Filter {
{ streamId: this.stream.streamId, eventType },
(error, response) => {
if (error) {
logger.error('Error adding filter event listener to event ' + eventType + 'for Stream ' + this.stream.streamId, error);
logger.error(
'Error adding filter event listener to event ' + eventType + 'for Stream ' + this.stream.streamId,
error
);
if (error.code === 401) {
return reject(new OpenViduError(OpenViduErrorName.OPENVIDU_PERMISSION_DENIED, "You don't have permissions to add a filter event listener"));
return reject(
new OpenViduError(
OpenViduErrorName.OPENVIDU_PERMISSION_DENIED,
"You don't have permissions to add a filter event listener"
)
);
} else {
return reject(error);
}
} else {
this.handlers.set(eventType, handler);
logger.info('Filter event listener to event ' + eventType + ' successfully applied on Stream ' + this.stream.streamId);
logger.info(
'Filter event listener to event ' + eventType + ' successfully applied on Stream ' + this.stream.streamId
);
return resolve();
}
}
@ -206,7 +242,6 @@ export class Filter {
});
}
/**
* Removes certain filter event listener previously set.
*
@ -222,20 +257,29 @@ export class Filter {
{ streamId: this.stream.streamId, eventType },
(error, response) => {
if (error) {
logger.error('Error removing filter event listener to event ' + eventType + 'for Stream ' + this.stream.streamId, error);
logger.error(
'Error removing filter event listener to event ' + eventType + 'for Stream ' + this.stream.streamId,
error
);
if (error.code === 401) {
return reject(new OpenViduError(OpenViduErrorName.OPENVIDU_PERMISSION_DENIED, "You don't have permissions to add a filter event listener"));
return reject(
new OpenViduError(
OpenViduErrorName.OPENVIDU_PERMISSION_DENIED,
"You don't have permissions to add a filter event listener"
)
);
} else {
return reject(error);
}
} else {
this.handlers.delete(eventType);
logger.info('Filter event listener to event ' + eventType + ' successfully removed on Stream ' + this.stream.streamId);
logger.info(
'Filter event listener to event ' + eventType + ' successfully removed on Stream ' + this.stream.streamId
);
return resolve();
}
}
);
});
}
}
}

View File

@ -31,12 +31,10 @@ const logger: OpenViduLogger = OpenViduLogger.getInstance();
*/
let platform: PlatformUtils;
/**
* Easy recording of [[Stream]] objects straightaway from the browser. Initialized with [[OpenVidu.initLocalRecorder]] method
*/
export class LocalRecorder {
state: LocalRecorderState;
private connectionId: string;
@ -52,18 +50,17 @@ export class LocalRecorder {
*/
constructor(private stream: Stream) {
platform = PlatformUtils.getInstance();
this.connectionId = (!!this.stream.connection) ? this.stream.connection.connectionId : 'default-connection';
this.connectionId = !!this.stream.connection ? this.stream.connection.connectionId : 'default-connection';
this.id = this.stream.streamId + '_' + this.connectionId + '_localrecord';
this.state = LocalRecorderState.READY;
}
/**
* Starts the recording of the Stream. [[state]] property must be `READY`. After method succeeds is set to `RECORDING`
*
* @param options The [MediaRecorder.options](https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder/MediaRecorder#parameters) to be used to record this Stream.
* For example:
*
* For example:
*
* ```javascript
* var OV = new OpenVidu();
* var publisher = await OV.initPublisherAsync();
@ -75,7 +72,7 @@ export class LocalRecorder {
* };
* localRecorder.record(options);
* ```
*
*
* If not specified, the default options preferred by the platform will be used.
*
* @returns A Promise (to which you can optionally subscribe to) that is resolved if the recording successfully started and rejected with an Error object if not
@ -84,14 +81,24 @@ export class LocalRecorder {
return new Promise((resolve, reject) => {
try {
if (typeof options === 'string' || options instanceof String) {
return reject(`When calling LocalRecorder.record(options) parameter 'options' cannot be a string. Must be an object like { mimeType: "${options}" }`);
return reject(
`When calling LocalRecorder.record(options) parameter 'options' cannot be a string. Must be an object like { mimeType: "${options}" }`
);
}
if (typeof MediaRecorder === 'undefined') {
logger.error('MediaRecorder not supported on your device. See compatibility in https://caniuse.com/#search=MediaRecorder');
throw (Error('MediaRecorder not supported on your device. See compatibility in https://caniuse.com/#search=MediaRecorder'));
logger.error(
'MediaRecorder not supported on your device. See compatibility in https://caniuse.com/#search=MediaRecorder'
);
throw Error(
'MediaRecorder not supported on your device. See compatibility in https://caniuse.com/#search=MediaRecorder'
);
}
if (this.state !== LocalRecorderState.READY) {
throw (Error('\'LocalRecord.record()\' needs \'LocalRecord.state\' to be \'READY\' (current value: \'' + this.state + '\'). Call \'LocalRecorder.clean()\' or init a new LocalRecorder before'));
throw Error(
"'LocalRecord.record()' needs 'LocalRecord.state' to be 'READY' (current value: '" +
this.state +
"'). Call 'LocalRecorder.clean()' or init a new LocalRecorder before"
);
}
logger.log("Starting local recording of stream '" + this.stream.streamId + "' of connection '" + this.connectionId + "'");
@ -103,7 +110,6 @@ export class LocalRecorder {
this.mediaRecorder = new MediaRecorder(this.stream.getMediaStream(), options);
this.mediaRecorder.start();
} catch (err) {
return reject(err);
}
@ -136,11 +142,9 @@ export class LocalRecorder {
this.state = LocalRecorderState.RECORDING;
return resolve();
});
}
/**
* Ends the recording of the Stream. [[state]] property must be `RECORDING` or `PAUSED`. After method succeeds is set to `FINISHED`
* @returns A Promise (to which you can optionally subscribe to) that is resolved if the recording successfully stopped and rejected with an Error object if not
@ -149,7 +153,11 @@ export class LocalRecorder {
return new Promise((resolve, reject) => {
try {
if (this.state === LocalRecorderState.READY || this.state === LocalRecorderState.FINISHED) {
throw (Error('\'LocalRecord.stop()\' needs \'LocalRecord.state\' to be \'RECORDING\' or \'PAUSED\' (current value: \'' + this.state + '\'). Call \'LocalRecorder.start()\' before'));
throw Error(
"'LocalRecord.stop()' needs 'LocalRecord.state' to be 'RECORDING' or 'PAUSED' (current value: '" +
this.state +
"'). Call 'LocalRecorder.start()' before"
);
}
this.mediaRecorder.onstop = () => {
this.onStopDefault();
@ -162,7 +170,6 @@ export class LocalRecorder {
});
}
/**
* Pauses the recording of the Stream. [[state]] property must be `RECORDING`. After method succeeds is set to `PAUSED`
* @returns A Promise (to which you can optionally subscribe to) that is resolved if the recording was successfully paused and rejected with an Error object if not
@ -171,7 +178,13 @@ export class LocalRecorder {
return new Promise((resolve, reject) => {
try {
if (this.state !== LocalRecorderState.RECORDING) {
return reject(Error('\'LocalRecord.pause()\' needs \'LocalRecord.state\' to be \'RECORDING\' (current value: \'' + this.state + '\'). Call \'LocalRecorder.start()\' or \'LocalRecorder.resume()\' before'));
return reject(
Error(
"'LocalRecord.pause()' needs 'LocalRecord.state' to be 'RECORDING' (current value: '" +
this.state +
"'). Call 'LocalRecorder.start()' or 'LocalRecorder.resume()' before"
)
);
}
this.mediaRecorder.pause();
this.state = LocalRecorderState.PAUSED;
@ -190,7 +203,11 @@ export class LocalRecorder {
return new Promise((resolve, reject) => {
try {
if (this.state !== LocalRecorderState.PAUSED) {
throw (Error('\'LocalRecord.resume()\' needs \'LocalRecord.state\' to be \'PAUSED\' (current value: \'' + this.state + '\'). Call \'LocalRecorder.pause()\' before'));
throw Error(
"'LocalRecord.resume()' needs 'LocalRecord.state' to be 'PAUSED' (current value: '" +
this.state +
"'). Call 'LocalRecorder.pause()' before"
);
}
this.mediaRecorder.resume();
this.state = LocalRecorderState.RECORDING;
@ -201,14 +218,16 @@ export class LocalRecorder {
});
}
/**
* Previews the recording, appending a new HTMLVideoElement to element with id `parentId`. [[state]] property must be `FINISHED`
*/
preview(parentElement): HTMLVideoElement {
if (this.state !== LocalRecorderState.FINISHED) {
throw (Error('\'LocalRecord.preview()\' needs \'LocalRecord.state\' to be \'FINISHED\' (current value: \'' + this.state + '\'). Call \'LocalRecorder.stop()\' before'));
throw Error(
"'LocalRecord.preview()' needs 'LocalRecord.state' to be 'FINISHED' (current value: '" +
this.state +
"'). Call 'LocalRecorder.stop()' before"
);
}
this.videoPreview = document.createElement('video');
@ -234,7 +253,6 @@ export class LocalRecorder {
return this.videoPreview;
}
/**
* Gracefully stops and cleans the current recording (WARNING: it is completely dismissed). Sets [[state]] to `READY` so the recording can start again
*/
@ -245,19 +263,24 @@ export class LocalRecorder {
this.state = LocalRecorderState.READY;
};
if (this.state === LocalRecorderState.RECORDING || this.state === LocalRecorderState.PAUSED) {
this.stop().then(() => f()).catch(() => f());
this.stop()
.then(() => f())
.catch(() => f());
} else {
f();
}
}
/**
* Downloads the recorded video through the browser. [[state]] property must be `FINISHED`
*/
download(): void {
if (this.state !== LocalRecorderState.FINISHED) {
throw (Error('\'LocalRecord.download()\' needs \'LocalRecord.state\' to be \'FINISHED\' (current value: \'' + this.state + '\'). Call \'LocalRecorder.stop()\' before'));
throw Error(
"'LocalRecord.download()' needs 'LocalRecord.state' to be 'FINISHED' (current value: '" +
this.state +
"'). Call 'LocalRecorder.stop()' before"
);
} else {
const a: HTMLAnchorElement = document.createElement('a');
a.style.display = 'none';
@ -278,13 +301,12 @@ export class LocalRecorder {
*/
getBlob(): Blob {
if (this.state !== LocalRecorderState.FINISHED) {
throw (Error('Call \'LocalRecord.stop()\' before getting Blob file'));
throw Error("Call 'LocalRecord.stop()' before getting Blob file");
} else {
return this.blob!;
}
}
/**
* Uploads the recorded video as a binary file performing an HTTP/POST operation to URL `endpoint`. [[state]] property must be `FINISHED`. Optional HTTP headers can be passed as second parameter. For example:
* ```
@ -298,7 +320,13 @@ export class LocalRecorder {
uploadAsBinary(endpoint: string, headers?: any): Promise<any> {
return new Promise((resolve, reject) => {
if (this.state !== LocalRecorderState.FINISHED) {
return reject(Error('\'LocalRecord.uploadAsBinary()\' needs \'LocalRecord.state\' to be \'FINISHED\' (current value: \'' + this.state + '\'). Call \'LocalRecorder.stop()\' before'));
return reject(
Error(
"'LocalRecord.uploadAsBinary()' needs 'LocalRecord.state' to be 'FINISHED' (current value: '" +
this.state +
"'). Call 'LocalRecorder.stop()' before"
)
);
} else {
const http = new XMLHttpRequest();
http.open('POST', endpoint, true);
@ -324,7 +352,6 @@ export class LocalRecorder {
});
}
/**
* Uploads the recorded video as a multipart file performing an HTTP/POST operation to URL `endpoint`. [[state]] property must be `FINISHED`. Optional HTTP headers can be passed as second parameter. For example:
* ```
@ -338,7 +365,13 @@ export class LocalRecorder {
uploadAsMultipartfile(endpoint: string, headers?: any): Promise<any> {
return new Promise((resolve, reject) => {
if (this.state !== LocalRecorderState.FINISHED) {
return reject(Error('\'LocalRecord.uploadAsMultipartfile()\' needs \'LocalRecord.state\' to be \'FINISHED\' (current value: \'' + this.state + '\'). Call \'LocalRecorder.stop()\' before'));
return reject(
Error(
"'LocalRecord.uploadAsMultipartfile()' needs 'LocalRecord.state' to be 'FINISHED' (current value: '" +
this.state +
"'). Call 'LocalRecorder.stop()' before"
)
);
} else {
const http = new XMLHttpRequest();
http.open('POST', endpoint, true);
@ -368,7 +401,6 @@ export class LocalRecorder {
});
}
/* Private methods */
private onStopDefault(): void {
@ -381,5 +413,4 @@ export class LocalRecorder {
this.state = LocalRecorderState.FINISHED;
}
}

File diff suppressed because it is too large Load Diff

View File

@ -45,7 +45,6 @@ let platform: PlatformUtils;
* See available event listeners at [[PublisherEventMap]].
*/
export class Publisher extends StreamManager {
/**
* Whether the Publisher has been granted access to the requested input devices or not
*/
@ -82,7 +81,13 @@ export class Publisher extends StreamManager {
* @hidden
*/
constructor(targEl: string | HTMLElement | undefined, properties: PublisherProperties, openvidu: OpenVidu) {
super(new Stream((!!openvidu.session) ? openvidu.session : new Session(openvidu), { publisherProperties: properties, mediaConstraints: {} }), targEl);
super(
new Stream(!!openvidu.session ? openvidu.session : new Session(openvidu), {
publisherProperties: properties,
mediaConstraints: {}
}),
targEl
);
platform = PlatformUtils.getInstance();
this.properties = properties;
this.openvidu = openvidu;
@ -95,7 +100,6 @@ export class Publisher extends StreamManager {
});
}
/**
* Publish or unpublish the audio stream (if available). Calling this method twice in a row passing same `enabled` value will have no effect
*
@ -115,7 +119,9 @@ export class Publisher extends StreamManager {
*/
publishAudio(enabled: boolean): void {
if (this.stream.audioActive !== enabled) {
const affectedMediaStream: MediaStream = this.stream.displayMyRemote() ? this.stream.localMediaStreamWhenSubscribedToRemote! : this.stream.getMediaStream();
const affectedMediaStream: MediaStream = this.stream.displayMyRemote()
? this.stream.localMediaStreamWhenSubscribedToRemote!
: this.stream.getMediaStream();
affectedMediaStream.getAudioTracks().forEach((track) => {
track.enabled = enabled;
});
@ -132,18 +138,22 @@ export class Publisher extends StreamManager {
if (error) {
logger.error("Error sending 'streamPropertyChanged' event", error);
} else {
this.session.emitEvent('streamPropertyChanged', [new StreamPropertyChangedEvent(this.session, this.stream, 'audioActive', enabled, !enabled, 'publishAudio')]);
this.emitEvent('streamPropertyChanged', [new StreamPropertyChangedEvent(this, this.stream, 'audioActive', enabled, !enabled, 'publishAudio')]);
this.session.emitEvent('streamPropertyChanged', [
new StreamPropertyChangedEvent(this.session, this.stream, 'audioActive', enabled, !enabled, 'publishAudio')
]);
this.emitEvent('streamPropertyChanged', [
new StreamPropertyChangedEvent(this, this.stream, 'audioActive', enabled, !enabled, 'publishAudio')
]);
this.session.sendVideoData(this.stream.streamManager);
}
});
}
);
}
this.stream.audioActive = enabled;
logger.info("'Publisher' has " + (enabled ? 'published' : 'unpublished') + ' its audio stream');
}
}
/**
* Publish or unpublish the video stream (if available). Calling this method twice in a row passing same `enabled` value will have no effect
*
@ -169,12 +179,11 @@ export class Publisher extends StreamManager {
* will be used instead.
*/
publishVideo<T extends boolean>(enabled: T, resource?: T extends false ? boolean : MediaStreamTrack): Promise<void> {
return new Promise(async (resolve, reject) => {
if (this.stream.videoActive !== enabled) {
const affectedMediaStream: MediaStream = this.stream.displayMyRemote() ? this.stream.localMediaStreamWhenSubscribedToRemote! : this.stream.getMediaStream();
const affectedMediaStream: MediaStream = this.stream.displayMyRemote()
? this.stream.localMediaStreamWhenSubscribedToRemote!
: this.stream.getMediaStream();
let mustRestartMediaStream = false;
affectedMediaStream.getVideoTracks().forEach((track) => {
track.enabled = enabled;
@ -212,13 +221,16 @@ export class Publisher extends StreamManager {
delete this.stream.lastVBFilter;
}, 1);
}
}
};
if (!!resource && resource instanceof MediaStreamTrack) {
await replaceVideoTrack(resource);
} else {
try {
const mediaStream = await navigator.mediaDevices.getUserMedia({ audio: false, video: this.stream.lastVideoTrackConstraints });
const mediaStream = await navigator.mediaDevices.getUserMedia({
audio: false,
video: this.stream.lastVideoTrackConstraints
});
await replaceVideoTrack(mediaStream.getVideoTracks()[0]);
} catch (error) {
return reject(error);
@ -239,11 +251,23 @@ export class Publisher extends StreamManager {
if (error) {
logger.error("Error sending 'streamPropertyChanged' event", error);
} else {
this.session.emitEvent('streamPropertyChanged', [new StreamPropertyChangedEvent(this.session, this.stream, 'videoActive', enabled, !enabled, 'publishVideo')]);
this.emitEvent('streamPropertyChanged', [new StreamPropertyChangedEvent(this, this.stream, 'videoActive', enabled, !enabled, 'publishVideo')]);
this.session.emitEvent('streamPropertyChanged', [
new StreamPropertyChangedEvent(
this.session,
this.stream,
'videoActive',
enabled,
!enabled,
'publishVideo'
)
]);
this.emitEvent('streamPropertyChanged', [
new StreamPropertyChangedEvent(this, this.stream, 'videoActive', enabled, !enabled, 'publishVideo')
]);
this.session.sendVideoData(this.stream.streamManager);
}
});
}
);
}
this.stream.videoActive = enabled;
logger.info("'Publisher' has " + (enabled ? 'published' : 'unpublished') + ' its video stream');
@ -252,22 +276,19 @@ export class Publisher extends StreamManager {
});
}
/**
* Call this method before [[Session.publish]] if you prefer to subscribe to your Publisher's remote stream instead of using the local stream, as any other user would do.
*/
subscribeToRemote(value?: boolean): void {
value = (value !== undefined) ? value : true;
value = value !== undefined ? value : true;
this.isSubscribedToRemote = value;
this.stream.subscribeToMyRemote(value);
}
/**
* See [[EventDispatcher.on]]
*/
on<K extends keyof PublisherEventMap>(type: K, handler: (event: PublisherEventMap[K]) => void): this {
super.on(<any>type, handler);
if (type === 'streamCreated') {
@ -292,12 +313,10 @@ export class Publisher extends StreamManager {
return this;
}
/**
* See [[EventDispatcher.once]]
*/
once<K extends keyof PublisherEventMap>(type: K, handler: (event: PublisherEventMap[K]) => void): this {
super.once(<any>type, handler);
if (type === 'streamCreated') {
@ -322,7 +341,6 @@ export class Publisher extends StreamManager {
return this;
}
/**
* See [[EventDispatcher.off]]
*/
@ -331,7 +349,6 @@ export class Publisher extends StreamManager {
return this;
}
/**
* Replaces the current video or audio track with a different one. This allows you to replace an ongoing track with a different one
* without having to renegotiate the whole WebRTC connection (that is, initializing a new Publisher, unpublishing the previous one
@ -359,7 +376,6 @@ export class Publisher extends StreamManager {
*/
initialize(): Promise<void> {
return new Promise(async (resolve, reject) => {
let constraints: MediaStreamConstraints = {};
let constraintsAux: MediaStreamConstraints = {};
const timeForDialogEvent = 2000;
@ -368,7 +384,7 @@ export class Publisher extends StreamManager {
const errorCallback = (openViduError: OpenViduError) => {
this.accessDenied = true;
this.accessAllowed = false;
logger.error(`Publisher initialization failed. ${openViduError.name}: ${openViduError.message}`)
logger.error(`Publisher initialization failed. ${openViduError.name}: ${openViduError.message}`);
return reject(openViduError);
};
@ -378,21 +394,27 @@ export class Publisher extends StreamManager {
if (typeof MediaStreamTrack !== 'undefined' && this.properties.audioSource instanceof MediaStreamTrack) {
mediaStream.removeTrack(mediaStream.getAudioTracks()[0]);
mediaStream.addTrack((<MediaStreamTrack>this.properties.audioSource));
mediaStream.addTrack(<MediaStreamTrack>this.properties.audioSource);
}
if (typeof MediaStreamTrack !== 'undefined' && this.properties.videoSource instanceof MediaStreamTrack) {
mediaStream.removeTrack(mediaStream.getVideoTracks()[0]);
mediaStream.addTrack((<MediaStreamTrack>this.properties.videoSource));
mediaStream.addTrack(<MediaStreamTrack>this.properties.videoSource);
}
// Apply PublisherProperties.publishAudio and PublisherProperties.publishVideo
if (!!mediaStream.getAudioTracks()[0]) {
const enabled = (this.stream.audioActive !== undefined && this.stream.audioActive !== null) ? this.stream.audioActive : !!this.stream.outboundStreamOpts.publisherProperties.publishAudio;
const enabled =
this.stream.audioActive !== undefined && this.stream.audioActive !== null
? this.stream.audioActive
: !!this.stream.outboundStreamOpts.publisherProperties.publishAudio;
mediaStream.getAudioTracks()[0].enabled = enabled;
}
if (!!mediaStream.getVideoTracks()[0]) {
const enabled = (this.stream.videoActive !== undefined && this.stream.videoActive !== null) ? this.stream.videoActive : !!this.stream.outboundStreamOpts.publisherProperties.publishVideo;
const enabled =
this.stream.videoActive !== undefined && this.stream.videoActive !== null
? this.stream.videoActive
: !!this.stream.outboundStreamOpts.publisherProperties.publishVideo;
mediaStream.getVideoTracks()[0].enabled = enabled;
}
@ -411,16 +433,16 @@ export class Publisher extends StreamManager {
// https://w3c.github.io/mst-content-hint/#video-content-hints
switch (this.stream.typeOfVideo) {
case TypeOfVideo.SCREEN:
track.contentHint = "detail";
track.contentHint = 'detail';
break;
case TypeOfVideo.CUSTOM:
logger.warn("CUSTOM type video track was provided without Content Hint!");
track.contentHint = "motion";
logger.warn('CUSTOM type video track was provided without Content Hint!');
track.contentHint = 'motion';
break;
case TypeOfVideo.CAMERA:
case TypeOfVideo.IPCAM:
default:
track.contentHint = "motion";
track.contentHint = 'motion';
break;
}
logger.info(`Video track Content Hint set: '${track.contentHint}'`);
@ -438,7 +460,7 @@ export class Publisher extends StreamManager {
if (this.stream.isSendVideo()) {
// Has video track
this.getVideoDimensions().then(dimensions => {
this.getVideoDimensions().then((dimensions) => {
this.stream.videoDimensions = {
width: dimensions.width,
height: dimensions.height
@ -491,7 +513,6 @@ export class Publisher extends StreamManager {
this.clearPermissionDialogTimer(startTime, timeForDialogEvent);
mediaStream.addTrack(audioOnlyStream.getAudioTracks()[0]);
successCallback(mediaStream);
} catch (error) {
this.clearPermissionDialogTimer(startTime, timeForDialogEvent);
mediaStream.getAudioTracks().forEach((track) => {
@ -529,7 +550,6 @@ export class Publisher extends StreamManager {
errorName = OpenViduErrorName.INPUT_AUDIO_DEVICE_NOT_FOUND;
errorMessage = error.toString();
errorCallback(new OpenViduError(errorName, errorMessage));
} catch (error) {
errorName = OpenViduErrorName.INPUT_VIDEO_DEVICE_NOT_FOUND;
errorMessage = error.toString();
@ -538,12 +558,13 @@ export class Publisher extends StreamManager {
break;
case 'notallowederror':
errorName = this.stream.isSendScreen() ? OpenViduErrorName.SCREEN_CAPTURE_DENIED : OpenViduErrorName.DEVICE_ACCESS_DENIED;
errorName = this.stream.isSendScreen()
? OpenViduErrorName.SCREEN_CAPTURE_DENIED
: OpenViduErrorName.DEVICE_ACCESS_DENIED;
errorMessage = error.toString();
errorCallback(new OpenViduError(errorName, errorMessage));
break;
case 'overconstrainederror':
try {
const mediaStream = await navigator.mediaDevices.getUserMedia({
audio: false,
@ -554,20 +575,27 @@ export class Publisher extends StreamManager {
});
if (error.constraint.toLowerCase() === 'deviceid') {
errorName = OpenViduErrorName.INPUT_AUDIO_DEVICE_NOT_FOUND;
errorMessage = "Audio input device with deviceId '" + (<ConstrainDOMStringParameters>(<MediaTrackConstraints>constraints.audio).deviceId!!).exact + "' not found";
errorMessage =
"Audio input device with deviceId '" +
(<ConstrainDOMStringParameters>(<MediaTrackConstraints>constraints.audio).deviceId!!).exact +
"' not found";
} else {
errorName = OpenViduErrorName.PUBLISHER_PROPERTIES_ERROR;
errorMessage = "Audio input device doesn't support the value passed for constraint '" + error.constraint + "'";
errorMessage =
"Audio input device doesn't support the value passed for constraint '" + error.constraint + "'";
}
errorCallback(new OpenViduError(errorName, errorMessage));
} catch (error) {
if (error.constraint.toLowerCase() === 'deviceid') {
errorName = OpenViduErrorName.INPUT_VIDEO_DEVICE_NOT_FOUND;
errorMessage = "Video input device with deviceId '" + (<ConstrainDOMStringParameters>(<MediaTrackConstraints>constraints.video).deviceId!!).exact + "' not found";
errorMessage =
"Video input device with deviceId '" +
(<ConstrainDOMStringParameters>(<MediaTrackConstraints>constraints.video).deviceId!!).exact +
"' not found";
} else {
errorName = OpenViduErrorName.PUBLISHER_PROPERTIES_ERROR;
errorMessage = "Video input device doesn't support the value passed for constraint '" + error.constraint + "'";
errorMessage =
"Video input device doesn't support the value passed for constraint '" + error.constraint + "'";
}
errorCallback(new OpenViduError(errorName, errorMessage));
}
@ -585,13 +613,15 @@ export class Publisher extends StreamManager {
errorCallback(new OpenViduError(errorName, errorMessage));
break;
}
}
};
try {
const myConstraints = await this.openvidu.generateMediaConstraints(this.properties);
if (!!myConstraints.videoTrack && !!myConstraints.audioTrack ||
!!myConstraints.audioTrack && myConstraints.constraints?.video === false ||
!!myConstraints.videoTrack && myConstraints.constraints?.audio === false) {
const myConstraints = await this.openvidu.generateMediaConstraints(this.properties);
if (
(!!myConstraints.videoTrack && !!myConstraints.audioTrack) ||
(!!myConstraints.audioTrack && myConstraints.constraints?.video === false) ||
(!!myConstraints.videoTrack && myConstraints.constraints?.audio === false)
) {
// No need to call getUserMedia at all. MediaStreamTracks already provided
successCallback(this.openvidu.addAlreadyProvidedTracks(myConstraints, new MediaStream(), this.stream));
} else {
@ -603,7 +633,7 @@ export class Publisher extends StreamManager {
};
this.stream.setOutboundStreamOptions(outboundStreamOptions);
const definedAudioConstraint = ((constraints.audio === undefined) ? true : constraints.audio);
const definedAudioConstraint = constraints.audio === undefined ? true : constraints.audio;
constraintsAux.audio = this.stream.isSendScreen() ? false : definedAudioConstraint;
constraintsAux.video = constraints.video;
startTime = Date.now();
@ -664,9 +694,8 @@ export class Publisher extends StreamManager {
* and then try to use MediaStreamTrack.getSettingsMethod(). If not available, then we
* use the HTMLVideoElement properties videoWidth and videoHeight
*/
getVideoDimensions(): Promise<{ width: number, height: number }> {
getVideoDimensions(): Promise<{ width: number; height: number }> {
return new Promise((resolve, reject) => {
// Ionic iOS and Safari iOS supposedly require the video element to actually exist inside the DOM
const requiresDomInsertion: boolean = platform.isIonicIos() || platform.isIOSWithSafari();
@ -692,7 +721,7 @@ export class Publisher extends StreamManager {
}
return resolve({ width, height });
}
};
if (this.videoReference.readyState >= 1) {
// The video already has metadata available
@ -739,7 +768,14 @@ export class Publisher extends StreamManager {
this.videoReference.muted = true;
this.videoReference.autoplay = true;
this.videoReference.controls = false;
if (platform.isSafariBrowser() || (platform.isIPhoneOrIPad() && (platform.isChromeMobileBrowser() || platform.isEdgeMobileBrowser() || platform.isOperaMobileBrowser() || platform.isFirefoxMobileBrowser()))) {
if (
platform.isSafariBrowser() ||
(platform.isIPhoneOrIPad() &&
(platform.isChromeMobileBrowser() ||
platform.isEdgeMobileBrowser() ||
platform.isOperaMobileBrowser() ||
platform.isFirefoxMobileBrowser()))
) {
this.videoReference.playsInline = true;
}
this.stream.setMediaStream(mediaStream);
@ -753,7 +789,9 @@ export class Publisher extends StreamManager {
* @hidden
*/
replaceTrackInMediaStream(track: MediaStreamTrack, updateLastConstraints: boolean): void {
const mediaStream: MediaStream = this.stream.displayMyRemote() ? this.stream.localMediaStreamWhenSubscribedToRemote! : this.stream.getMediaStream();
const mediaStream: MediaStream = this.stream.displayMyRemote()
? this.stream.localMediaStreamWhenSubscribedToRemote!
: this.stream.getMediaStream();
let removedTrack: MediaStreamTrack;
if (track.kind === 'video') {
removedTrack = mediaStream.getVideoTracks()[0];
@ -773,12 +811,12 @@ export class Publisher extends StreamManager {
};
if (track.kind === 'video' && updateLastConstraints) {
this.openvidu.sendNewVideoDimensionsIfRequired(this, 'trackReplaced', 50, 30);
this.openvidu.sendTrackChangedEvent(this,'trackReplaced', trackInfo.oldLabel, trackInfo.newLabel, 'videoActive');
if(this.stream.isLocalStreamPublished) {
this.openvidu.sendTrackChangedEvent(this, 'trackReplaced', trackInfo.oldLabel, trackInfo.newLabel, 'videoActive');
if (this.stream.isLocalStreamPublished) {
this.session.sendVideoData(this.stream.streamManager, 5, true, 5);
}
} else if(track.kind === 'audio' && updateLastConstraints) {
this.openvidu.sendTrackChangedEvent(this,'trackReplaced', trackInfo.oldLabel, trackInfo.newLabel, 'audioActive');
} else if (track.kind === 'audio' && updateLastConstraints) {
this.openvidu.sendTrackChangedEvent(this, 'trackReplaced', trackInfo.oldLabel, trackInfo.newLabel, 'audioActive');
}
if (track.kind === 'audio') {
this.stream.disableHarkSpeakingEvent(false);
@ -798,7 +836,7 @@ export class Publisher extends StreamManager {
private clearPermissionDialogTimer(startTime: number, waitTime: number): void {
clearTimeout(this.permissionDialogTimeout);
if ((Date.now() - startTime) > waitTime) {
if (Date.now() - startTime > waitTime) {
// Permission dialog was shown and now is closed
this.emitEvent('accessDialogClosed', []);
}
@ -808,19 +846,18 @@ export class Publisher extends StreamManager {
const senders: RTCRtpSender[] = this.stream.getRTCPeerConnection().getSenders();
let sender: RTCRtpSender | undefined;
if (track.kind === 'video') {
sender = senders.find(s => !!s.track && s.track.kind === 'video');
sender = senders.find((s) => !!s.track && s.track.kind === 'video');
if (!sender) {
throw new Error('There\'s no replaceable track for that kind of MediaStreamTrack in this Publisher object');
throw new Error("There's no replaceable track for that kind of MediaStreamTrack in this Publisher object");
}
} else if (track.kind === 'audio') {
sender = senders.find(s => !!s.track && s.track.kind === 'audio');
sender = senders.find((s) => !!s.track && s.track.kind === 'audio');
if (!sender) {
throw new Error('There\'s no replaceable track for that kind of MediaStreamTrack in this Publisher object');
throw new Error("There's no replaceable track for that kind of MediaStreamTrack in this Publisher object");
}
} else {
throw new Error('Unknown track kind ' + track.kind);
}
await (sender as RTCRtpSender).replaceTrack(track);
}
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -48,7 +48,6 @@ let platform: PlatformUtils;
* See available event listeners at [[StreamManagerEventMap]].
*/
export abstract class StreamManager extends EventDispatcher {
/**
* The Stream represented in the DOM by the Publisher/Subscriber
*/
@ -126,7 +125,14 @@ export abstract class StreamManager extends EventDispatcher {
id: '',
canplayListenerAdded: false
};
if (platform.isSafariBrowser() || (platform.isIPhoneOrIPad() && (platform.isChromeMobileBrowser() || platform.isEdgeMobileBrowser() || platform.isOperaMobileBrowser() || platform.isFirefoxMobileBrowser()))) {
if (
platform.isSafariBrowser() ||
(platform.isIPhoneOrIPad() &&
(platform.isChromeMobileBrowser() ||
platform.isEdgeMobileBrowser() ||
platform.isOperaMobileBrowser() ||
platform.isFirefoxMobileBrowser()))
) {
this.firstVideoElement.video.playsInline = true;
}
this.targetElement = targEl;
@ -144,7 +150,6 @@ export abstract class StreamManager extends EventDispatcher {
* See [[EventDispatcher.on]]
*/
on<K extends keyof StreamManagerEventMap>(type: K, handler: (event: StreamManagerEventMap[K]) => void): this {
super.onAux(type, "Event '" + type + "' triggered by '" + (this.remote ? 'Subscriber' : 'Publisher') + "'", handler);
if (type === 'videoElementCreated') {
@ -154,11 +159,14 @@ export abstract class StreamManager extends EventDispatcher {
}
}
if (type === 'streamPlaying') {
if (this.videos[0] && this.videos[0].video &&
if (
this.videos[0] &&
this.videos[0].video &&
this.videos[0].video.currentTime > 0 &&
this.videos[0].video.paused === false &&
this.videos[0].video.ended === false &&
this.videos[0].video.readyState === 4) {
this.videos[0].video.readyState === 4
) {
this.ee.emitEvent('streamPlaying', [new StreamManagerEvent(this, 'streamPlaying', undefined)]);
}
}
@ -180,7 +188,6 @@ export abstract class StreamManager extends EventDispatcher {
* See [[EventDispatcher.once]]
*/
once<K extends keyof StreamManagerEventMap>(type: K, handler: (event: StreamManagerEventMap[K]) => void): this {
super.onceAux(type, "Event '" + type + "' triggered once by '" + (this.remote ? 'Subscriber' : 'Publisher') + "'", handler);
if (type === 'videoElementCreated') {
@ -189,11 +196,14 @@ export abstract class StreamManager extends EventDispatcher {
}
}
if (type === 'streamPlaying') {
if (this.videos[0] && this.videos[0].video &&
if (
this.videos[0] &&
this.videos[0].video &&
this.videos[0].video.currentTime > 0 &&
this.videos[0].video.paused === false &&
this.videos[0].video.ended === false &&
this.videos[0].video.readyState === 4) {
this.videos[0].video.readyState === 4
) {
this.ee.emitEvent('streamPlaying', [new StreamManagerEvent(this, 'streamPlaying', undefined)]);
}
}
@ -215,19 +225,20 @@ export abstract class StreamManager extends EventDispatcher {
* See [[EventDispatcher.off]]
*/
off<K extends keyof StreamManagerEventMap>(type: K, handler?: (event: StreamManagerEventMap[K]) => void): this {
super.offAux(type, handler);
if (type === 'publisherStartSpeaking') {
// Both StreamManager and Session can have "publisherStartSpeaking" event listeners
const remainingStartSpeakingEventListeners = this.ee.getListeners(type).length + this.stream.session.ee.getListeners(type).length;
const remainingStartSpeakingEventListeners =
this.ee.getListeners(type).length + this.stream.session.ee.getListeners(type).length;
if (remainingStartSpeakingEventListeners === 0) {
this.stream.disableHarkSpeakingEvent(false);
}
}
if (type === 'publisherStopSpeaking') {
// Both StreamManager and Session can have "publisherStopSpeaking" event listeners
const remainingStopSpeakingEventListeners = this.ee.getListeners(type).length + this.stream.session.ee.getListeners(type).length;
const remainingStopSpeakingEventListeners =
this.ee.getListeners(type).length + this.stream.session.ee.getListeners(type).length;
if (remainingStopSpeakingEventListeners === 0) {
this.stream.disableHarkStoppedSpeakingEvent(false);
}
@ -255,7 +266,6 @@ export abstract class StreamManager extends EventDispatcher {
* Publisher/Subscriber and has been successfully disassociated from that one and properly added to this one.
*/
addVideoElement(video: HTMLVideoElement): number {
this.initializeVideoProperties(video);
if (!this.remote && this.stream.displayMyRemote()) {
@ -280,7 +290,7 @@ export abstract class StreamManager extends EventDispatcher {
}
}
this.stream.session.streamManagers.forEach(streamManager => {
this.stream.session.streamManagers.forEach((streamManager) => {
streamManager.disassociateVideo(video);
});
@ -370,12 +380,22 @@ export abstract class StreamManager extends EventDispatcher {
* - `interval`: (number) how frequently the analyser polls the audio stream to check if speaking has started/stopped or audio volume has changed. Default **100** (ms)
* - `threshold`: (number) the volume at which _publisherStartSpeaking_, _publisherStopSpeaking_ events will be fired. Default **-50** (dB)
*/
updatePublisherSpeakingEventsOptions(publisherSpeakingEventsOptions: { interval?: number, threshold?: number }): void {
const currentHarkOptions = !!this.stream.harkOptions ? this.stream.harkOptions : (this.stream.session.openvidu.advancedConfiguration.publisherSpeakingEventsOptions || {});
const newInterval = (typeof publisherSpeakingEventsOptions.interval === 'number') ?
publisherSpeakingEventsOptions.interval : ((typeof currentHarkOptions.interval === 'number') ? currentHarkOptions.interval : 100);
const newThreshold = (typeof publisherSpeakingEventsOptions.threshold === 'number') ?
publisherSpeakingEventsOptions.threshold : ((typeof currentHarkOptions.threshold === 'number') ? currentHarkOptions.threshold : -50);
updatePublisherSpeakingEventsOptions(publisherSpeakingEventsOptions: { interval?: number; threshold?: number }): void {
const currentHarkOptions = !!this.stream.harkOptions
? this.stream.harkOptions
: this.stream.session.openvidu.advancedConfiguration.publisherSpeakingEventsOptions || {};
const newInterval =
typeof publisherSpeakingEventsOptions.interval === 'number'
? publisherSpeakingEventsOptions.interval
: typeof currentHarkOptions.interval === 'number'
? currentHarkOptions.interval
: 100;
const newThreshold =
typeof publisherSpeakingEventsOptions.threshold === 'number'
? publisherSpeakingEventsOptions.threshold
: typeof currentHarkOptions.threshold === 'number'
? currentHarkOptions.threshold
: -50;
this.stream.harkOptions = {
interval: newInterval,
threshold: newThreshold
@ -402,7 +422,14 @@ export abstract class StreamManager extends EventDispatcher {
video.autoplay = true;
video.controls = false;
if (platform.isSafariBrowser() || (platform.isIPhoneOrIPad() && (platform.isChromeMobileBrowser() || platform.isEdgeMobileBrowser() || platform.isOperaMobileBrowser() || platform.isFirefoxMobileBrowser()))) {
if (
platform.isSafariBrowser() ||
(platform.isIPhoneOrIPad() &&
(platform.isChromeMobileBrowser() ||
platform.isEdgeMobileBrowser() ||
platform.isOperaMobileBrowser() ||
platform.isFirefoxMobileBrowser()))
) {
video.playsInline = true;
}
@ -440,7 +467,7 @@ export abstract class StreamManager extends EventDispatcher {
}
}
this.videos.forEach(streamManagerVideo => {
this.videos.forEach((streamManagerVideo) => {
// Remove oncanplay event listener (only OpenVidu browser listener, not the user ones)
if (!!streamManagerVideo.video && !!streamManagerVideo.video.removeEventListener) {
streamManagerVideo.video.removeEventListener('canplay', this.canPlayListener);
@ -450,12 +477,14 @@ export abstract class StreamManager extends EventDispatcher {
// Only remove from DOM videos created by OpenVidu Browser (those generated by passing a valid targetElement in OpenVidu.initPublisher
// and Session.subscribe or those created by StreamManager.createVideoElement). All this videos triggered a videoElementCreated event
streamManagerVideo.video.parentNode!.removeChild(streamManagerVideo.video);
this.ee.emitEvent('videoElementDestroyed', [new VideoElementEvent(streamManagerVideo.video, this, 'videoElementDestroyed')]);
this.ee.emitEvent('videoElementDestroyed', [
new VideoElementEvent(streamManagerVideo.video, this, 'videoElementDestroyed')
]);
}
// Remove srcObject from the video
this.removeSrcObject(streamManagerVideo);
// Remove from collection of videos every video managed by OpenVidu Browser
this.videos.filter(v => !v.targetElement);
this.videos.filter((v) => !v.targetElement);
});
}
@ -480,7 +509,7 @@ export abstract class StreamManager extends EventDispatcher {
* @hidden
*/
addPlayEventToFirstVideo() {
if ((!!this.videos[0]) && (!!this.videos[0].video) && (!this.videos[0].canplayListenerAdded)) {
if (!!this.videos[0] && !!this.videos[0].video && !this.videos[0].canplayListenerAdded) {
this.activateStreamPlayingEventExceptionTimeout();
this.videos[0].video.addEventListener('canplay', this.canPlayListener);
this.videos[0].canplayListenerAdded = true;
@ -491,7 +520,7 @@ export abstract class StreamManager extends EventDispatcher {
* @hidden
*/
updateMediaStream(mediaStream: MediaStream) {
this.videos.forEach(streamManagerVideo => {
this.videos.forEach((streamManagerVideo) => {
streamManagerVideo.video.srcObject = mediaStream;
if (platform.isIonicIos()) {
// iOS Ionic. LIMITATION: must reinsert the video in the DOM for
@ -512,8 +541,8 @@ export abstract class StreamManager extends EventDispatcher {
}
/**
* @hidden
*/
* @hidden
*/
createVideo(): HTMLVideoElement {
return document.createElement('video');
}
@ -569,9 +598,18 @@ export abstract class StreamManager extends EventDispatcher {
// Trigger ExceptionEvent NO_STREAM_PLAYING_EVENT if after timeout there is no 'canplay' event
const msTimeout = this.stream.session.openvidu.advancedConfiguration.noStreamPlayingEventExceptionTimeout || 4000;
this.streamPlayingEventExceptionTimeout = setTimeout(() => {
const msg = 'StreamManager of Stream ' + this.stream.streamId + ' (' + (this.remote ? 'Subscriber' : 'Publisher') + ') did not trigger "streamPlaying" event in ' + msTimeout + ' ms';
const msg =
'StreamManager of Stream ' +
this.stream.streamId +
' (' +
(this.remote ? 'Subscriber' : 'Publisher') +
') did not trigger "streamPlaying" event in ' +
msTimeout +
' ms';
logger.warn(msg);
this.stream.session.emitEvent('exception', [new ExceptionEvent(this.stream.session, ExceptionEventName.NO_STREAM_PLAYING_EVENT, (<any>this) as Subscriber, msg)]);
this.stream.session.emitEvent('exception', [
new ExceptionEvent(this.stream.session, ExceptionEventName.NO_STREAM_PLAYING_EVENT, (<any>this) as Subscriber, msg)
]);
delete this.streamPlayingEventExceptionTimeout;
}, msTimeout);
}
@ -580,5 +618,4 @@ export abstract class StreamManager extends EventDispatcher {
clearTimeout(this.streamPlayingEventExceptionTimeout as any);
delete this.streamPlayingEventExceptionTimeout;
}
}

View File

@ -27,11 +27,10 @@ const logger: OpenViduLogger = OpenViduLogger.getInstance();
/**
* Packs remote media streams. Participants automatically receive them when others publish their streams. Initialized with [[Session.subscribe]] method
*
*
* See available event listeners at [[StreamManagerEventMap]].
*/
export class Subscriber extends StreamManager {
/**
* @hidden
*/
@ -52,9 +51,12 @@ export class Subscriber extends StreamManager {
* @param value `true` to subscribe to the audio stream, `false` to unsubscribe from it
*/
subscribeToAudio(value: boolean): Subscriber {
this.stream.getMediaStream().getAudioTracks().forEach((track) => {
track.enabled = value;
});
this.stream
.getMediaStream()
.getAudioTracks()
.forEach((track) => {
track.enabled = value;
});
this.stream.audioActive = value;
logger.info("'Subscriber' has " + (value ? 'subscribed to' : 'unsubscribed from') + ' its audio stream');
return this;
@ -65,9 +67,12 @@ export class Subscriber extends StreamManager {
* @param value `true` to subscribe to the video stream, `false` to unsubscribe from it
*/
subscribeToVideo(value: boolean): Subscriber {
this.stream.getMediaStream().getVideoTracks().forEach((track) => {
track.enabled = value;
});
this.stream
.getMediaStream()
.getVideoTracks()
.forEach((track) => {
track.enabled = value;
});
this.stream.videoActive = value;
logger.info("'Subscriber' has " + (value ? 'subscribed to' : 'unsubscribed from') + ' its video stream');
return this;
@ -93,5 +98,4 @@ export class Subscriber extends StreamManager {
removedTrack.stop();
mediaStream.addTrack(track);
}
}
}

View File

@ -20,4 +20,4 @@ export enum LocalRecorderState {
RECORDING = 'RECORDING',
PAUSED = 'PAUSED',
FINISHED = 'FINISHED'
}
}

View File

@ -19,7 +19,6 @@
* Defines property [[OpenViduError.name]]
*/
export enum OpenViduErrorName {
/**
* Browser is not supported by OpenVidu.
* Returned upon unsuccessful [[Session.connect]]
@ -38,7 +37,7 @@ export enum OpenViduErrorName {
* error occurred at the OS, browser or web page level, which prevented access to the device.
* Returned upon unsuccessful [[OpenVidu.initPublisher]] or [[OpenVidu.getUserMedia]]
*/
DEVICE_ALREADY_IN_USE = "DEVICE_ALREADY_IN_USE",
DEVICE_ALREADY_IN_USE = 'DEVICE_ALREADY_IN_USE',
/**
* The user hasn't granted permissions to capture some desktop screen when the browser asked for them.
@ -122,7 +121,6 @@ export enum OpenViduErrorName {
* Simple object to identify runtime errors on the client side
*/
export class OpenViduError {
/**
* Uniquely identifying name of the error
*/
@ -140,5 +138,4 @@ export class OpenViduError {
this.name = name;
this.message = message;
}
}
}

View File

@ -20,4 +20,4 @@ export enum TypeOfVideo {
SCREEN = 'SCREEN',
CUSTOM = 'CUSTOM',
IPCAM = 'IPCAM'
}
}

View File

@ -19,7 +19,6 @@
* How the video will be inserted in the DOM for Publishers and Subscribers. See [[PublisherProperties.insertMode]] and [[SubscriberProperties.insertMode]]
*/
export enum VideoInsertMode {
/**
* Video inserted after the target element (as next sibling)
*/
@ -40,5 +39,4 @@ export enum VideoInsertMode {
* Video replaces target element
*/
REPLACE = 'REPLACE'
}
}

View File

@ -19,14 +19,12 @@ import { Event } from './Event';
import { Connection } from '../../OpenVidu/Connection';
import { Session } from '../../OpenVidu/Session';
/**
* Triggered by:
* - [[connectionCreated]]
* - [[connectionDestroyed]]
*/
export class ConnectionEvent extends Event {
/**
* Connection object that was created or destroyed
*/
@ -58,6 +56,5 @@ export class ConnectionEvent extends Event {
* @hidden
*/
// tslint:disable-next-line:no-empty
callDefaultBehavior() { }
}
callDefaultBehavior() {}
}

View File

@ -25,7 +25,6 @@ import { Event } from './Event';
* Triggered by [[connectionPropertyChanged]]
*/
export class ConnectionPropertyChangedEvent extends Event {
/**
* The Connection whose property has changed
*/
@ -61,6 +60,5 @@ export class ConnectionPropertyChangedEvent extends Event {
* @hidden
*/
// tslint:disable-next-line:no-empty
callDefaultBehavior() { }
callDefaultBehavior() {}
}

View File

@ -20,7 +20,6 @@ import { StreamManager } from '../../OpenVidu/StreamManager';
import { Session } from '../../OpenVidu/Session';
export abstract class Event {
/**
* Whether the event has a default behavior that may be prevented by calling [[Event.preventDefault]]
*/
@ -73,7 +72,7 @@ export abstract class Event {
*/
preventDefault() {
// tslint:disable-next-line:no-empty
this.callDefaultBehavior = () => { };
this.callDefaultBehavior = () => {};
this.hasBeenPrevented = true;
}
@ -81,5 +80,4 @@ export abstract class Event {
* @hidden
*/
abstract callDefaultBehavior();
}
}

View File

@ -18,4 +18,4 @@
/**
* All OpenVidu Browser events inherit from this interface
*/
export interface EventMap { }
export interface EventMap {}

View File

@ -21,19 +21,18 @@ import { StreamManagerEventMap } from './StreamManagerEventMap';
/**
* Events dispatched by [[Publisher]] object. Manage event listeners with
* [[Publisher.on]], [[Publisher.once]] and [[Publisher.off]] methods.
*
*
* Example:
*
*
* ```javascript
* publisher.on('accessDenied', () => {
* publisher.on('accessDenied', () => {
* console.error('Camera access has been denied!');
* }
*
*
* publisher.off('accessDenied');
* ```
*/
export interface PublisherEventMap extends StreamManagerEventMap {
/**
* Event dispatched when the [[Publisher]] has been published to the session (see [[Session.publish]]).
*/
@ -46,7 +45,7 @@ export interface PublisherEventMap extends StreamManagerEventMap {
/**
* Event dispatched when a Publisher tries to access some media input device and has the required permissions to do so.
*
*
* This happens when calling [[OpenVidu.initPublisher]] or [[OpenVidu.initPublisherAsync]] and the application
* has permissions to use the devices. This usually means the user has accepted the permissions dialog that the
* browser will show when trying to access the camera/microphone/screen.
@ -55,7 +54,7 @@ export interface PublisherEventMap extends StreamManagerEventMap {
/**
* Event dispatched when a Publisher tries to access some media input device and does NOT have the required permissions to do so.
*
*
* This happens when calling [[OpenVidu.initPublisher]] or [[OpenVidu.initPublisherAsync]] and the application
* lacks the required permissions to use the devices. This usually means the user has NOT accepted the permissions dialog that the
* browser will show when trying to access the camera/microphone/screen.
@ -64,7 +63,7 @@ export interface PublisherEventMap extends StreamManagerEventMap {
/**
* Event dispatched when the pop-up shown by the browser to request permissions for the input media devices is opened.
*
*
* You can use this event to alert the user about granting permissions for your website. Note that this event is artificially
* generated based only on time intervals when accessing media devices. A heavily overloaded client device that simply takes more
* than usual to access the media device could produce a false trigger of this event.
@ -74,8 +73,8 @@ export interface PublisherEventMap extends StreamManagerEventMap {
/**
* Event dispatched after the user clicks on "Allow" or "Block" in the pop-up shown by the browser to request permissions
* for the input media devices.
*
*
* This event can only be triggered after an [[accessDialogOpened]] event has been previously triggered.
*/
accessDialogClosed: never;
}
}

View File

@ -30,19 +30,18 @@ import { StreamPropertyChangedEvent } from '../StreamPropertyChangedEvent';
/**
* Events dispatched by [[Session]] object. Manage event listeners with
* [[Session.on]], [[Session.once]] and [[Session.off]] methods.
*
*
* Example:
*
*
* ```javascript
* session.on('connectionCreated', (event) => {
* session.on('connectionCreated', (event) => {
* console.log('Connection ' + event.connection.connectionId + ' created');
* }
*
*
* session.off('connectionDestroyed');
* ```
*/
export interface SessionEventMap extends EventMap {
/**
* Event dispatched when a new user has connected to the session.
*

View File

@ -22,21 +22,20 @@ import { StreamPropertyChangedEvent } from '../StreamPropertyChangedEvent';
import { VideoElementEvent } from '../VideoElementEvent';
/**
* Events dispatched by [[StreamManager]] object. Manage event listeners with
* Events dispatched by [[StreamManager]] object. Manage event listeners with
* [[StreamManager.on]], [[StreamManager.once]] and [[StreamManager.off]] methods.
*
*
* Example:
*
*
* ```javascript
* streamManager.on('videoElementCreated', (event) => {
* streamManager.on('videoElementCreated', (event) => {
* console.log('New video element created:', event.element);
* }
*
*
* streamManager.off('videoElementCreated');
* ```
*/
export interface StreamManagerEventMap extends EventMap {
/**
* Event dispatched when a new HTML video element has been inserted into DOM by OpenVidu Browser library. See
* [Manage video players](/en/stable/cheatsheet/manage-videos) section.
@ -73,7 +72,7 @@ export interface StreamManagerEventMap extends EventMap {
/**
* Event dispatched when the user owning the stream has started speaking.
*
*
* Extra information:
* - This event will only be triggered for **streams that have audio tracks** ([[Stream.hasAudio]] must be true).
* - Further configuration can be applied on how the event is dispatched by setting property `publisherSpeakingEventsOptions` in the call of [[OpenVidu.setAdvancedConfiguration]].
@ -82,10 +81,10 @@ export interface StreamManagerEventMap extends EventMap {
/**
* Event dispatched when the user owning the stream has stopped speaking.
*
*
* Extra information:
* - This event will only be triggered for **streams that have audio tracks** ([[Stream.hasAudio]] must be true).
* - Further configuration can be applied on how the event is dispatched by setting property `publisherSpeakingEventsOptions` in the call of [[OpenVidu.setAdvancedConfiguration]].
*/
publisherStopSpeaking: PublisherSpeakingEvent;
}
}

View File

@ -20,15 +20,13 @@ import { Stream } from '../../OpenVidu/Stream';
import { Subscriber } from '../../OpenVidu/Subscriber';
import { Event } from './Event';
/**
* Defines property [[ExceptionEvent.name]]
*/
export enum ExceptionEventName {
/**
* There was an unexpected error on the server-side processing an ICE candidate generated and sent by the client-side.
*
*
* [[ExceptionEvent]] objects with this [[ExceptionEvent.name]] will have as [[ExceptionEvent.origin]] property a [[Session]] object.
*/
ICE_CANDIDATE_ERROR = 'ICE_CANDIDATE_ERROR',
@ -36,11 +34,11 @@ export enum ExceptionEventName {
/**
* The [ICE connection state](https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/iceConnectionState)
* of an [RTCPeerConnection](https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection) reached `failed` status.
*
*
* This is a terminal error that won't have any kind of possible recovery. If the client is still connected to OpenVidu Server,
* then an automatic reconnection process of the media stream is immediately performed. If the ICE connection has broken due to
* a total network drop, then no automatic reconnection process will be possible.
*
*
* [[ExceptionEvent]] objects with this [[ExceptionEvent.name]] will have as [[ExceptionEvent.origin]] property a [[Stream]] object.
*/
ICE_CONNECTION_FAILED = 'ICE_CONNECTION_FAILED',
@ -48,15 +46,15 @@ export enum ExceptionEventName {
/**
* The [ICE connection state](https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/iceConnectionState)
* of an [RTCPeerConnection](https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection) reached `disconnected` status.
*
*
* This is not a terminal error, and it is possible for the ICE connection to be reconnected. If the client is still connected to
* OpenVidu Server and after certain timeout the ICE connection has not reached a success or terminal status, then an automatic
* reconnection process of the media stream is performed. If the ICE connection has broken due to a total network drop, then no
* automatic reconnection process will be possible.
*
*
* You can customize the timeout for the reconnection attempt with property [[OpenViduAdvancedConfiguration.iceConnectionDisconnectedExceptionTimeout]],
* which by default is 4000 milliseconds.
*
*
* [[ExceptionEvent]] objects with this [[ExceptionEvent.name]] will have as [[ExceptionEvent.origin]] property a [[Stream]] object.
*/
ICE_CONNECTION_DISCONNECTED = 'ICE_CONNECTION_DISCONNECTED',
@ -64,20 +62,20 @@ export enum ExceptionEventName {
/**
* A [[Subscriber]] object has not fired event `streamPlaying` after certain timeout. `streamPlaying` event belongs to [[StreamManagerEvent]]
* category. It wraps Web API native event [canplay](https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/canplay_event).
*
*
* OpenVidu Browser can take care of the video players (see [here](/en/stable/cheatsheet/manage-videos/#let-openvidu-take-care-of-the-video-players)),
* or you can take care of video players on your own (see [here](/en/stable/cheatsheet/manage-videos/#you-take-care-of-the-video-players)).
* Either way, whenever a [[Subscriber]] object is commanded to attach its [[Stream]] to a video element, it is supposed to fire `streamPlaying`
* event shortly after. If it does not, then we can safely assume that something wrong has happened while playing the remote video and the
* event shortly after. If it does not, then we can safely assume that something wrong has happened while playing the remote video and the
* application may be notified through this specific ExceptionEvent.
*
*
* The timeout can be configured with property [[OpenViduAdvancedConfiguration.noStreamPlayingEventExceptionTimeout]]. By default it is 4000 milliseconds.
*
*
* This is just an informative exception. It only means that a remote Stream that is supposed to be playing by a video player has not done so
* in a reasonable time. But the lack of the event can be caused by multiple reasons. If a Subscriber is not playing its Stream, the origin
* of the problem could be located at the Publisher side. Or may be caused by a transient network problem. But it also could be a problem with
* autoplay permissions. Bottom line, the cause can be very varied, and depending on the application the lack of the event could even be expected.
*
*
* [[ExceptionEvent]] objects with this [[ExceptionEvent.name]] will have as [[ExceptionEvent.origin]] property a [[Subscriber]] object.
*/
NO_STREAM_PLAYING_EVENT = 'NO_STREAM_PLAYING_EVENT'
@ -87,7 +85,6 @@ export enum ExceptionEventName {
* Triggered by [[SessionEventMap.exception]]
*/
export class ExceptionEvent extends Event {
/**
* Name of the exception
*/
@ -126,6 +123,5 @@ export class ExceptionEvent extends Event {
* @hidden
*/
// tslint:disable-next-line:no-empty
callDefaultBehavior() { }
}
callDefaultBehavior() {}
}

View File

@ -18,12 +18,10 @@
import { Event } from './Event';
import { Filter } from '../../OpenVidu/Filter';
/**
* Defines every event dispatched by audio/video stream filters. You can subscribe to filter events by calling [[Filter.addEventListener]]
*/
export class FilterEvent extends Event {
/**
* Data of the event
*/
@ -41,6 +39,5 @@ export class FilterEvent extends Event {
* @hidden
*/
// tslint:disable-next-line:no-empty
callDefaultBehavior() { }
}
callDefaultBehavior() {}
}

View File

@ -23,7 +23,6 @@ import { Connection } from '../../OpenVidu/Connection';
* Triggered by [[networkQualityLevelChanged]]
*/
export class NetworkQualityLevelChangedEvent extends Event {
/**
* New value of the network quality level
*/
@ -37,7 +36,7 @@ export class NetworkQualityLevelChangedEvent extends Event {
/**
* Connection for whom the network quality level changed
*/
connection: Connection
connection: Connection;
/**
* @hidden
@ -53,6 +52,5 @@ export class NetworkQualityLevelChangedEvent extends Event {
* @hidden
*/
// tslint:disable-next-line:no-empty
callDefaultBehavior() { }
callDefaultBehavior() {}
}

View File

@ -20,14 +20,12 @@ import { Connection } from '../../OpenVidu/Connection';
import { Session } from '../../OpenVidu/Session';
import { StreamManager } from '../../OpenVidu/StreamManager';
/**
* Triggered by:
* - `publisherStartSpeaking` (available for [Session](/en/stable/api/openvidu-browser/interfaces/SessionEventMap.html#publisherStartSpeaking) and [StreamManager](/en/stable/api/openvidu-browser/interfaces/StreamManagerEventMap.html#publisherStartSpeaking) objects)
* - `publisherStopSpeaking` (available for [Session](/en/stable/api/openvidu-browser/interfaces/SessionEventMap.html#publisherStopSpeaking) and [StreamManager](/en/stable/api/openvidu-browser/interfaces/StreamManagerEventMap.html#publisherStopSpeaking) objects)
*/
export class PublisherSpeakingEvent extends Event {
/**
* The client that started or stopped speaking
*/
@ -52,6 +50,5 @@ export class PublisherSpeakingEvent extends Event {
* @hidden
*/
// tslint:disable-next-line:no-empty
callDefaultBehavior() { }
}
callDefaultBehavior() {}
}

View File

@ -18,14 +18,12 @@
import { Event } from './Event';
import { Session } from '../../OpenVidu/Session';
/**
* Triggered by:
* - [[recordingStarted]]
* - [[recordingStopped]]
*/
export class RecordingEvent extends Event {
/**
* The recording ID generated in openvidu-server
*/
@ -68,6 +66,5 @@ export class RecordingEvent extends Event {
* @hidden
*/
// tslint:disable-next-line:no-empty
callDefaultBehavior() { }
}
callDefaultBehavior() {}
}

View File

@ -24,12 +24,10 @@ import { OpenViduLogger } from '../Logger/OpenViduLogger';
*/
const logger: OpenViduLogger = OpenViduLogger.getInstance();
/**
* Triggered by [[sessionDisconnected]]
*/
export class SessionDisconnectedEvent extends Event {
/**
* - "disconnect": you have called `Session.disconnect()`
* - "forceDisconnectByUser": you have been evicted from the Session by other user calling `Session.forceDisconnect()`
@ -39,7 +37,7 @@ export class SessionDisconnectedEvent extends Event {
* Session object will always have previously dispatched a `reconnecting` event. If the reconnection process succeeds,
* Session object will dispatch a `reconnected` event. If it fails, Session object will dispatch a SessionDisconnectedEvent
* with reason "networkDisconnect"
* - "nodeCrashed": a node has crashed in the server side. You can use this reason to ask your application's backend to reconnect
* - "nodeCrashed": a node has crashed in the server side. You can use this reason to ask your application's backend to reconnect
* to a new session to replace the crashed one
*/
reason: string;
@ -56,13 +54,12 @@ export class SessionDisconnectedEvent extends Event {
* @hidden
*/
callDefaultBehavior() {
logger.info("Calling default behavior upon '" + this.type + "' event dispatched by 'Session'");
const session = <Session>this.target;
// Dispose and delete all remote Connections
session.remoteConnections.forEach(remoteConnection => {
session.remoteConnections.forEach((remoteConnection) => {
const connectionId = remoteConnection.connectionId;
if (!!session.remoteConnections.get(connectionId)?.stream) {
session.remoteConnections.get(connectionId)?.stream!.disposeWebRtcPeer();
@ -79,5 +76,4 @@ export class SessionDisconnectedEvent extends Event {
session.remoteConnections.delete(connectionId);
});
}
}
}

View File

@ -19,14 +19,12 @@ import { Event } from './Event';
import { Connection } from '../../OpenVidu/Connection';
import { Session } from '../../OpenVidu/Session';
/**
* Triggered by [[SessionEventMap.signal]]
*/
export class SignalEvent extends Event {
/**
* The type of signal. It is string `"signal"` for those signals sent with no [[SignalOptions.type]] property, and `"signal:type"` if was sent with a
* The type of signal. It is string `"signal"` for those signals sent with no [[SignalOptions.type]] property, and `"signal:type"` if was sent with a
* valid [[SignalOptions.type]] property.
*
* The client must be specifically subscribed to `Session.on('signal:type', function(signalEvent) {...})` to trigger that type of signal.
@ -62,6 +60,5 @@ export class SignalEvent extends Event {
* @hidden
*/
// tslint:disable-next-line:no-empty
callDefaultBehavior() { }
}
callDefaultBehavior() {}
}

View File

@ -32,7 +32,6 @@ const logger: OpenViduLogger = OpenViduLogger.getInstance();
* - `streamDestroyed` (available for [Session](/en/stable/api/openvidu-browser/interfaces/SessionEventMap.html#streamDestroyed) and [Publisher](/en/stable/api/openvidu-browser/interfaces/PublisherEventMap.html#streamDestroyed) objects)
*/
export class StreamEvent extends Event {
/**
* Stream object that was created or destroyed
*/
@ -68,7 +67,6 @@ export class StreamEvent extends Event {
*/
callDefaultBehavior() {
if (this.type === 'streamDestroyed') {
if (this.target instanceof Session) {
// Remote Stream
logger.info("Calling default behavior upon '" + this.type + "' event dispatched by 'Session'");
@ -82,7 +80,7 @@ export class StreamEvent extends Event {
// Delete Publisher object from OpenVidu publishers array
const openviduPublishers = (<Publisher>this.target).openvidu.publishers;
for (let i = 0; i < openviduPublishers.length; i++) {
if (openviduPublishers[i] === (<Publisher>this.target)) {
if (openviduPublishers[i] === <Publisher>this.target) {
openviduPublishers.splice(i, 1);
break;
}
@ -109,8 +107,6 @@ export class StreamEvent extends Event {
}
}
}
}
}
}
}

View File

@ -24,12 +24,11 @@ import { StreamManager } from '../../OpenVidu/StreamManager';
* - [[streamAudioVolumeChange]]
*/
export class StreamManagerEvent extends Event {
/**
* For `streamAudioVolumeChange` event:
* - `{newValue: number, oldValue: number}`: new and old audio volume values. These values are between -100 (silence) and 0 (loudest possible volume).
* They are not exact and depend on how the browser is managing the audio track, but -100 and 0 can be taken as limit values.
*
*
* For `streamPlaying` event undefined
*/
value: Object | undefined;
@ -46,6 +45,5 @@ export class StreamManagerEvent extends Event {
* @hidden
*/
// tslint:disable-next-line:no-empty
callDefaultBehavior() { }
}
callDefaultBehavior() {}
}

View File

@ -24,7 +24,6 @@ import { StreamManager } from '../../OpenVidu/StreamManager';
* Triggered by `streamPropertyChanged` (available for [Session](/en/stable/api/openvidu-browser/interfaces/SessionEventMap.html#streamPropertyChanged) and [StreamManager](/en/stable/api/openvidu-browser/interfaces/StreamManagerEventMap.html#streamPropertyChanged) objects)
*/
export class StreamPropertyChangedEvent extends Event {
/**
* The Stream whose property has changed. You can always identify the user publishing the changed stream by consulting property [[Stream.connection]]
*/
@ -57,7 +56,14 @@ export class StreamPropertyChangedEvent extends Event {
/**
* @hidden
*/
constructor(target: Session | StreamManager, stream: Stream, changedProperty: string, newValue: Object, oldValue: Object, reason: string) {
constructor(
target: Session | StreamManager,
stream: Stream,
changedProperty: string,
newValue: Object,
oldValue: Object,
reason: string
) {
super(false, target, 'streamPropertyChanged');
this.stream = stream;
this.changedProperty = changedProperty;
@ -70,6 +76,5 @@ export class StreamPropertyChangedEvent extends Event {
* @hidden
*/
// tslint:disable-next-line:no-empty
callDefaultBehavior() { }
}
callDefaultBehavior() {}
}

View File

@ -18,14 +18,12 @@
import { Event } from './Event';
import { StreamManager } from '../../OpenVidu/StreamManager';
/**
* Triggered by:
* - [[videoElementCreated]]
* - [[videoElementDestroyed]]
*/
export class VideoElementEvent extends Event {
/**
* Video element that was created or destroyed
*/
@ -43,6 +41,5 @@ export class VideoElementEvent extends Event {
* @hidden
*/
// tslint:disable-next-line:no-empty
callDefaultBehavior() { }
}
callDefaultBehavior() {}
}

View File

@ -19,4 +19,4 @@ export interface CustomMediaStreamConstraints {
constraints: MediaStreamConstraints;
audioTrack: MediaStreamTrack | undefined;
videoTrack: MediaStreamTrack | undefined;
}
}

View File

@ -18,4 +18,4 @@ export interface IceServerProperties {
url: string;
username?: string;
credential?: string;
}
}

View File

@ -29,6 +29,6 @@ export interface InboundStreamOptions {
videoActive: boolean;
typeOfVideo: TypeOfVideo;
frameRate: number;
videoDimensions: { width: number, height: number };
videoDimensions: { width: number; height: number };
filter?: Filter;
}
}

View File

@ -20,4 +20,4 @@ import { PublisherProperties } from '../Public/PublisherProperties';
export interface OutboundStreamOptions {
publisherProperties: PublisherProperties;
mediaConstraints: MediaStreamConstraints;
}
}

View File

@ -19,4 +19,4 @@ export interface SessionOptions {
sessionId: string;
participantId: string;
metadata: string;
}
}

View File

@ -21,4 +21,4 @@ export interface SignalOptions {
type?: string;
to?: Connection[];
data?: string;
}
}

View File

@ -29,4 +29,4 @@ export interface StreamOptionsServer {
frameRate: number;
videoDimensions: string;
filter: Filter;
}
}

View File

@ -19,7 +19,6 @@
* See [[Session.capabilities]]
*/
export interface Capabilities {
/**
* `true` if the client can call [[Session.forceDisconnect]], `false` if not
*/
@ -39,5 +38,4 @@ export interface Capabilities {
* `true` if the client can call [[Session.subscribe]], `false` if not (true for every user for now)
*/
subscribe: boolean;
}
}

View File

@ -19,7 +19,6 @@
* See [[OpenVidu.getDevices]]
*/
export interface Device {
/**
* `"videoinput"`, `"audioinput"`
*/
@ -34,4 +33,4 @@ export interface Device {
* Description of the device. An empty string if the user hasn't granted permissions to the site to access the device
*/
label: string;
}
}

View File

@ -19,7 +19,6 @@
* See [[OpenVidu.setAdvancedConfiguration]]
*/
export interface OpenViduAdvancedConfiguration {
/**
* Array of [RTCIceServer](https://developer.mozilla.org/en-US/docs/Web/API/RTCIceServer) to be used by OpenVidu Browser. By default OpenVidu will generate the required credentials to use the COTURN server hosted along OpenVidu Server
* You can also set this property to string 'freeice' to force the use of free STUN servers instead (got thanks to [freeice](https://github.com/DamonOehlman/freeice) library).
@ -36,7 +35,7 @@ export interface OpenViduAdvancedConfiguration {
* Custom configuration for the [[PublisherSpeakingEvent]] feature and the [StreamManagerEvent.streamAudioVolumeChange](/en/stable/api/openvidu-browser/classes/StreamManagerEvent.html) feature. It is an object which includes the following optional properties:
* - `interval`: (number) how frequently the analyser polls the audio stream to check if speaking has started/stopped or audio volume has changed. Default **100** (ms)
* - `threshold`: (number) the volume at which _publisherStartSpeaking_ and _publisherStopSpeaking_ events will be fired. Default **-50** (dB)
*
*
* This sets the global default configuration that will affect all streams, but you can later customize these values for each specific stream by calling [[StreamManager.updatePublisherSpeakingEventsOptions]]
*/
publisherSpeakingEventsOptions?: {
@ -47,10 +46,10 @@ export interface OpenViduAdvancedConfiguration {
/**
* Determines the automatic reconnection process policy. Whenever the client's network drops, OpenVidu Browser starts a reconnection process with OpenVidu Server. After network is recovered, OpenVidu Browser automatically
* inspects all of its media streams to see their status. For any of them that are broken, it asks OpenVidu Server for a forced and silent reconnection.
*
*
* This policy is technically enough to recover any broken media connection after a network drop, but in practice it has been proven that OpenVidu Browser may think a media connection has properly recovered when in fact it has not.
* This is not a common case, and it only affects Publisher streams, but it may occur. This property allows **forcing OpenVidu Browser to reconnect all of its outgoing media streams** after a network drop regardless of their supposed status.
*
*
* Default to `false`.
*/
forceMediaReconnectionAfterNetworkDrop?: boolean;
@ -59,16 +58,15 @@ export interface OpenViduAdvancedConfiguration {
* The milliseconds that must elapse after triggering [[ExceptionEvent]] of name [`ICE_CONNECTION_DISCONNECTED`](/en/stable/api/openvidu-browser/enums/ExceptionEventName.html#ICE_CONNECTION_DISCONNECTED) to perform an automatic reconnection process of the affected media stream.
* This automatic reconnection process can only take place if the client still has network connection to OpenVidu Server. If the ICE connection has broken because of a total network drop,
* then no reconnection process will be possible at all.
*
*
* Default to `4000`.
*/
iceConnectionDisconnectedExceptionTimeout?: number;
/**
* The milliseconds that must elapse for the [[ExceptionEvent]] of name [`NO_STREAM_PLAYING_EVENT`](/en/stable/api/openvidu-browser/enums/ExceptionEventName.html#NO_STREAM_PLAYING_EVENT) to be fired.
*
*
* Default to `4000`.
*/
noStreamPlayingEventExceptionTimeout?: number;
}

View File

@ -22,7 +22,6 @@ import { VideoInsertMode } from '../../Enums/VideoInsertMode';
* See [[OpenVidu.initPublisher]]
*/
export interface PublisherProperties {
/**
* Which device should provide the audio source. Can be:
* - Property `deviceId` of a [[Device]]
@ -98,5 +97,4 @@ export interface PublisherProperties {
* Define a filter to apply in the Publisher's stream
*/
filter?: Filter;
}

View File

@ -21,7 +21,6 @@ import { Connection } from '../../../OpenVidu/Connection';
* See [[Session.signal]]
*/
export interface SignalOptions {
/**
* The actual message of the signal.
*/
@ -38,4 +37,4 @@ export interface SignalOptions {
* receive it. Participants subscribed to `Session.on('signal')` will receive all signals.
*/
type?: string;
}
}

View File

@ -17,9 +17,7 @@
import { VideoInsertMode } from '../../Enums/VideoInsertMode';
export interface StreamManagerVideo {
/**
* DOM video element displaying the StreamManager's stream
*/
@ -56,6 +54,4 @@ export interface StreamManagerVideo {
* @hidden
*/
canplayListenerAdded: boolean;
}
}

View File

@ -21,7 +21,6 @@ import { VideoInsertMode } from '../../Enums/VideoInsertMode';
* See [[Session.subscribe]]
*/
export interface SubscriberProperties {
/**
* How the video element of the subscriber should be inserted in the DOM
* @default VideoInsertMode.APPEND
@ -39,5 +38,4 @@ export interface SubscriberProperties {
* @default true
*/
subscribeToVideo?: boolean;
}
}

View File

@ -1,61 +1,52 @@
function Mapper() {
var sources = {};
var sources = {};
this.forEach = function (callback) {
for (var key in sources) {
var source = sources[key];
this.forEach = function (callback) {
for (var key in sources) {
var source = sources[key];
for (var key2 in source)
callback(source[key2]);
for (var key2 in source) callback(source[key2]);
}
};
};
this.get = function (id, source) {
var ids = sources[source];
if (ids == undefined)
return undefined;
this.get = function (id, source) {
var ids = sources[source];
if (ids == undefined) return undefined;
return ids[id];
};
return ids[id];
};
this.remove = function (id, source) {
var ids = sources[source];
if (ids == undefined)
return;
this.remove = function (id, source) {
var ids = sources[source];
if (ids == undefined) return;
delete ids[id];
delete ids[id];
// Check it's empty
for (var i in ids) {
return false
}
// Check it's empty
for (var i in ids) {
return false;
}
delete sources[source];
};
delete sources[source];
};
this.set = function (value, id, source) {
if (value == undefined)
return this.remove(id, source);
this.set = function (value, id, source) {
if (value == undefined) return this.remove(id, source);
var ids = sources[source];
if (ids == undefined)
sources[source] = ids = {};
ids[id] = value;
};
};
var ids = sources[source];
if (ids == undefined) sources[source] = ids = {};
ids[id] = value;
};
}
Mapper.prototype.pop = function (id, source) {
var value = this.get(id, source);
if (value == undefined)
return undefined;
var value = this.get(id, source);
if (value == undefined) return undefined;
this.remove(id, source);
this.remove(id, source);
return value;
return value;
};
module.exports = Mapper;
module.exports = Mapper;

View File

@ -17,5 +17,4 @@
var JsonRpcClient = require('./jsonrpcclient');
exports.JsonRpcClient = JsonRpcClient;
exports.JsonRpcClient = JsonRpcClient;

View File

@ -19,9 +19,11 @@ var RpcBuilder = require('../');
var WebSocketWithReconnection = require('./transports/webSocketWithReconnection');
var OpenViduLogger = require('../../../Logger/OpenViduLogger').OpenViduLogger;
Date.now = Date.now || function () {
return +new Date;
};
Date.now =
Date.now ||
function () {
return +new Date();
};
var PING_INTERVAL = 5000;
@ -51,7 +53,6 @@ var Logger = OpenViduLogger.getInstance();
* </pre>
*/
function JsonRpcClient(configuration) {
var self = this;
var wsConfig = configuration.ws;
@ -71,13 +72,13 @@ function JsonRpcClient(configuration) {
var onerror = wsConfig.onerror;
configuration.rpc.pull = function (params, request) {
request.reply(null, "push");
}
request.reply(null, 'push');
};
wsConfig.onreconnecting = function () {
Logger.debug("--------- ONRECONNECTING -----------");
Logger.debug('--------- ONRECONNECTING -----------');
if (status === RECONNECTING) {
Logger.error("Websocket already in RECONNECTING state when receiving a new ONRECONNECTING message. Ignoring it");
Logger.error('Websocket already in RECONNECTING state when receiving a new ONRECONNECTING message. Ignoring it');
return;
}
@ -87,12 +88,12 @@ function JsonRpcClient(configuration) {
if (onreconnecting) {
onreconnecting();
}
}
};
wsConfig.onreconnected = function () {
Logger.debug("--------- ONRECONNECTED -----------");
Logger.debug('--------- ONRECONNECTED -----------');
if (status === CONNECTED) {
Logger.error("Websocket already in CONNECTED state when receiving a new ONRECONNECTED message. Ignoring it");
Logger.error('Websocket already in CONNECTED state when receiving a new ONRECONNECTED message. Ignoring it');
return;
}
status = CONNECTED;
@ -102,12 +103,12 @@ function JsonRpcClient(configuration) {
if (onreconnected) {
onreconnected();
}
}
};
wsConfig.onconnected = function () {
Logger.debug("--------- ONCONNECTED -----------");
Logger.debug('--------- ONCONNECTED -----------');
if (status === CONNECTED) {
Logger.error("Websocket already in CONNECTED state when receiving a new ONCONNECTED message. Ignoring it");
Logger.error('Websocket already in CONNECTED state when receiving a new ONCONNECTED message. Ignoring it');
return;
}
status = CONNECTED;
@ -118,10 +119,10 @@ function JsonRpcClient(configuration) {
if (onconnected) {
onconnected();
}
}
};
wsConfig.onerror = function (error) {
Logger.debug("--------- ONERROR -----------");
Logger.debug('--------- ONERROR -----------');
status = DISCONNECTED;
@ -130,7 +131,7 @@ function JsonRpcClient(configuration) {
if (onerror) {
onerror(error);
}
}
};
var ws = new WebSocketWithReconnection(wsConfig);
@ -141,37 +142,41 @@ function JsonRpcClient(configuration) {
ping_request_timeout: configuration.rpc.heartbeatRequestTimeout
};
var rpc = new RpcBuilder(RpcBuilder.packers.JsonRPC, rpcBuilderOptions, ws,
function (request) {
var rpc = new RpcBuilder(RpcBuilder.packers.JsonRPC, rpcBuilderOptions, ws, function (request) {
Logger.debug('Received request: ' + JSON.stringify(request));
Logger.debug('Received request: ' + JSON.stringify(request));
try {
var func = configuration.rpc[request.method];
try {
var func = configuration.rpc[request.method];
if (func === undefined) {
Logger.error("Method " + request.method + " not registered in client");
} else {
func(request.params, request);
}
} catch (err) {
Logger.error('Exception processing request: ' + JSON.stringify(request));
Logger.error(err);
if (func === undefined) {
Logger.error('Method ' + request.method + ' not registered in client');
} else {
func(request.params, request);
}
});
} catch (err) {
Logger.error('Exception processing request: ' + JSON.stringify(request));
Logger.error(err);
}
});
this.send = function (method, params, callback) {
var requestTime = Date.now();
rpc.encode(method, params, function (error, result) {
if (error) {
try {
Logger.error("ERROR:" + error.message + " in Request: method:" +
method + " params:" + JSON.stringify(params) + " request:" +
error.request);
Logger.error(
'ERROR:' +
error.message +
' in Request: method:' +
method +
' params:' +
JSON.stringify(params) +
' request:' +
error.request
);
if (error.data) {
Logger.error("ERROR DATA:" + JSON.stringify(error.data));
Logger.error('ERROR DATA:' + JSON.stringify(error.data));
}
} catch (e) {}
error.requestTime = requestTime;
@ -183,11 +188,10 @@ function JsonRpcClient(configuration) {
callback(error, result);
}
});
}
};
function updateNotReconnectIfLessThan() {
Logger.debug("notReconnectIfNumLessThan = " + pingNextNum + ' (old=' +
notReconnectIfNumLessThan + ')');
Logger.debug('notReconnectIfNumLessThan = ' + pingNextNum + ' (old=' + notReconnectIfNumLessThan + ')');
notReconnectIfNumLessThan = pingNextNum;
}
@ -201,23 +205,25 @@ function JsonRpcClient(configuration) {
}
pingNextNum++;
self.send('ping', params, (function (pingNum) {
return function (error, result) {
if (error) {
Logger.debug("Error in ping request #" + pingNum + " (" +
error.message + ")");
if (pingNum > notReconnectIfNumLessThan) {
enabledPings = false;
updateNotReconnectIfLessThan();
Logger.debug("Server did not respond to ping message #" +
pingNum + ". Reconnecting... ");
ws.reconnectWs();
self.send(
'ping',
params,
(function (pingNum) {
return function (error, result) {
if (error) {
Logger.debug('Error in ping request #' + pingNum + ' (' + error.message + ')');
if (pingNum > notReconnectIfNumLessThan) {
enabledPings = false;
updateNotReconnectIfLessThan();
Logger.debug('Server did not respond to ping message #' + pingNum + '. Reconnecting... ');
ws.reconnectWs();
}
}
}
}
})(pingNextNum));
};
})(pingNextNum)
);
} else {
Logger.debug("Trying to send ping, but ping is not enabled");
Logger.debug('Trying to send ping, but ping is not enabled');
}
}
@ -227,7 +233,7 @@ function JsonRpcClient(configuration) {
*/
function usePing() {
if (!pingPongStarted) {
Logger.debug("Starting ping (if configured)")
Logger.debug('Starting ping (if configured)');
pingPongStarted = true;
if (configuration.heartbeat != undefined) {
@ -246,30 +252,29 @@ function JsonRpcClient(configuration) {
}
this.close = function (code, reason) {
Logger.debug("Closing with code: " + code + " because: " + reason);
Logger.debug('Closing with code: ' + code + ' because: ' + reason);
if (pingInterval != undefined) {
Logger.debug("Clearing ping interval");
Logger.debug('Clearing ping interval');
clearInterval(pingInterval);
}
pingPongStarted = false;
enabledPings = false;
ws.close(code, reason);
}
};
this.reconnect = function () {
ws.reconnectWs();
}
};
this.resetPing = function () {
enabledPings = true;
pingNextNum = 0;
usePing();
}
};
this.getReadyState = function () {
return ws.getReadyState();
}
};
}
module.exports = JsonRpcClient;
module.exports = JsonRpcClient;

View File

@ -17,4 +17,4 @@
var WebSocketWithReconnection = require('./webSocketWithReconnection');
exports.WebSocketWithReconnection = WebSocketWithReconnection;
exports.WebSocketWithReconnection = WebSocketWithReconnection;

View File

@ -14,7 +14,7 @@
* limitations under the License.
*/
"use strict";
'use strict';
var OpenViduLogger = require('../../../../Logger/OpenViduLogger').OpenViduLogger;
var Logger = OpenViduLogger.getInstance();
@ -45,17 +45,14 @@ function WebSocketWithReconnection(config) {
var ws = new WebSocket(wsUri);
ws.onopen = () => {
Logger.debug("WebSocket connected to " + wsUri);
Logger.debug('WebSocket connected to ' + wsUri);
if (config.onconnected) {
config.onconnected();
}
};
ws.onerror = error => {
Logger.error(
"Could not connect to " + wsUri + " (invoking onerror if defined)",
error
);
ws.onerror = (error) => {
Logger.error('Could not connect to ' + wsUri + ' (invoking onerror if defined)', error);
if (config.onerror) {
config.onerror(error);
}
@ -64,31 +61,27 @@ function WebSocketWithReconnection(config) {
var reconnectionOnClose = () => {
if (ws.readyState === CLOSED) {
if (closing) {
Logger.debug("Connection closed by user");
Logger.debug('Connection closed by user');
} else {
if (config.ismasternodecrashed()) {
Logger.error("Master Node has crashed. Stopping reconnection process");
Logger.error('Master Node has crashed. Stopping reconnection process');
} else {
Logger.debug("Connection closed unexpectedly. Reconnecting...");
Logger.debug('Connection closed unexpectedly. Reconnecting...');
reconnect(MAX_RETRIES, 1);
}
}
} else {
Logger.debug("Close callback from previous websocket. Ignoring it");
Logger.debug('Close callback from previous websocket. Ignoring it');
}
};
ws.onclose = reconnectionOnClose;
function reconnect(maxRetries, numRetries) {
Logger.debug(
"reconnect (attempt #" + numRetries + ", max=" + maxRetries + ")"
);
Logger.debug('reconnect (attempt #' + numRetries + ', max=' + maxRetries + ')');
if (numRetries === 1) {
if (reconnecting) {
Logger.warn(
"Trying to reconnect when already reconnecting... Ignoring this reconnection."
);
Logger.warn('Trying to reconnect when already reconnecting... Ignoring this reconnection.');
return;
} else {
reconnecting = true;
@ -101,24 +94,22 @@ function WebSocketWithReconnection(config) {
}
function addReconnectionQueryParamsIfMissing(uriString) {
var searchParams = new URLSearchParams((new URL(uriString)).search);
if (!searchParams.has("reconnect")) {
uriString = (Array.from(searchParams).length > 0) ? (uriString + '&reconnect=true') : (uriString + '?reconnect=true');
var searchParams = new URLSearchParams(new URL(uriString).search);
if (!searchParams.has('reconnect')) {
uriString = Array.from(searchParams).length > 0 ? uriString + '&reconnect=true' : uriString + '?reconnect=true';
}
return uriString;
}
function reconnectAux(maxRetries, numRetries) {
Logger.debug("Reconnection attempt #" + numRetries);
Logger.debug('Reconnection attempt #' + numRetries);
ws.close(4104, 'Connection closed for reconnection');
wsUri = addReconnectionQueryParamsIfMissing(wsUri);
ws = new WebSocket(wsUri);
ws.onopen = () => {
Logger.debug(
"Reconnected to " + wsUri + " after " + numRetries + " attempts..."
);
Logger.debug('Reconnected to ' + wsUri + ' after ' + numRetries + ' attempts...');
reconnecting = false;
registerMessageHandler();
if (config.onreconnected()) {
@ -127,8 +118,8 @@ function WebSocketWithReconnection(config) {
ws.onclose = reconnectionOnClose;
};
ws.onerror = error => {
Logger.warn("Reconnection error: ", error);
ws.onerror = (error) => {
Logger.warn('Reconnection error: ', error);
if (numRetries === maxRetries) {
if (config.ondisconnect) {
config.ondisconnect();
@ -147,11 +138,11 @@ function WebSocketWithReconnection(config) {
};
this.reconnectWs = () => {
Logger.debug("reconnectWs");
Logger.debug('reconnectWs');
reconnect(MAX_RETRIES, 1);
};
this.send = message => {
this.send = (message) => {
ws.send(message);
};
@ -164,7 +155,7 @@ function WebSocketWithReconnection(config) {
this.getReadyState = () => {
return ws.readyState;
}
};
}
module.exports = WebSocketWithReconnection;
module.exports = WebSocketWithReconnection;

View File

@ -11,39 +11,34 @@
* @return {String} - the stringified JsonRPC 2.0 message
*/
function pack(message, id) {
var result = {
jsonrpc: "2.0"
};
var result = {
jsonrpc: '2.0'
};
// Request
if (message.method) {
result.method = message.method;
// Request
if (message.method) {
result.method = message.method;
if (message.params)
result.params = message.params;
if (message.params) result.params = message.params;
// Request is a notification
if (id != undefined)
result.id = id;
}
// Request is a notification
if (id != undefined) result.id = id;
}
// Response
else if (id != undefined) {
if (message.error) {
if (message.result !== undefined)
throw new TypeError("Both result and error are defined");
// Response
else if (id != undefined) {
if (message.error) {
if (message.result !== undefined) throw new TypeError('Both result and error are defined');
result.error = message.error;
} else if (message.result !== undefined)
result.result = message.result;
else
throw new TypeError("No result or error is defined");
result.error = message.error;
} else if (message.result !== undefined) result.result = message.result;
else throw new TypeError('No result or error is defined');
result.id = id;
};
result.id = id;
}
return JSON.stringify(result);
};
return JSON.stringify(result);
}
/**
* Unpack a JsonRPC 2.0 message
@ -55,41 +50,36 @@ function pack(message, id) {
* @return {Object} - object filled with the JsonRPC 2.0 message content
*/
function unpack(message) {
var result = message;
var result = message;
if (typeof message === 'string' || message instanceof String) {
result = JSON.parse(message);
}
if (typeof message === 'string' || message instanceof String) {
result = JSON.parse(message);
}
// Check if it's a valid message
// Check if it's a valid message
var version = result.jsonrpc;
if (version !== '2.0')
throw new TypeError("Invalid JsonRPC version '" + version + "': " + message);
var version = result.jsonrpc;
if (version !== '2.0') throw new TypeError("Invalid JsonRPC version '" + version + "': " + message);
// Response
if (result.method == undefined) {
if (result.id == undefined)
throw new TypeError("Invalid message: " + message);
// Response
if (result.method == undefined) {
if (result.id == undefined) throw new TypeError('Invalid message: ' + message);
var result_defined = result.result !== undefined;
var error_defined = result.error !== undefined;
var result_defined = result.result !== undefined;
var error_defined = result.error !== undefined;
// Check only result or error is defined, not both or none
if (result_defined && error_defined)
throw new TypeError("Both result and error are defined: " + message);
// Check only result or error is defined, not both or none
if (result_defined && error_defined) throw new TypeError('Both result and error are defined: ' + message);
if (!result_defined && !error_defined)
throw new TypeError("No result or error is defined: " + message);
if (!result_defined && !error_defined) throw new TypeError('No result or error is defined: ' + message);
result.ack = result.id;
delete result.id;
}
// Return unpacked message
return result;
};
result.ack = result.id;
delete result.id;
}
// Return unpacked message
return result;
}
exports.pack = pack;
exports.unpack = unpack;
exports.unpack = unpack;

View File

@ -1,10 +1,10 @@
function pack(message) {
throw new TypeError("Not yet implemented");
};
throw new TypeError('Not yet implemented');
}
function unpack(message) {
throw new TypeError("Not yet implemented");
};
throw new TypeError('Not yet implemented');
}
exports.pack = pack;
exports.unpack = unpack;
exports.unpack = unpack;

View File

@ -1,6 +1,5 @@
var JsonRPC = require('./JsonRPC');
var XmlRPC = require('./XmlRPC');
exports.JsonRPC = JsonRPC;
exports.XmlRPC = XmlRPC;
exports.XmlRPC = XmlRPC;

View File

@ -1,42 +1,41 @@
type ConsoleFunction = (...data: any) => void;
export class ConsoleLogger {
/**
* @hidden
*/
logger: Console;
/**
* @hidden
*/
logger: Console
log: ConsoleFunction;
/**
* @hidden
*/
log: ConsoleFunction
info: ConsoleFunction;
/**
* @hidden
*/
info: ConsoleFunction
debug: ConsoleFunction;
/**
* @hidden
*/
debug: ConsoleFunction
warn: ConsoleFunction;
/**
* @hidden
*/
warn: ConsoleFunction
/**
* @hidden
*/
error: ConsoleFunction
error: ConsoleFunction;
constructor(console: Console) {
this.logger = console;
this.log = console.log,
this.info = console.info,
this.debug = console.debug,
this.warn = console.warn,
this.error = console.error
(this.log = console.log),
(this.info = console.info),
(this.debug = console.debug),
(this.warn = console.warn),
(this.error = console.error);
}
}

View File

@ -1,283 +1,285 @@
import { JL } from 'jsnlog'
import { OpenVidu } from "../../OpenVidu/OpenVidu";
import { JL } from 'jsnlog';
import { OpenVidu } from '../../OpenVidu/OpenVidu';
import { ConsoleLogger } from './ConsoleLogger';
import { OpenViduLoggerConfiguration } from "./OpenViduLoggerConfiguration";
import { OpenViduLoggerConfiguration } from './OpenViduLoggerConfiguration';
export class OpenViduLogger {
private static instance: OpenViduLogger;
private static instance: OpenViduLogger;
private JSNLOG_URL: string = '/openvidu/elk/openvidu-browser-logs';
private MAX_JSNLOG_BATCH_LOG_MESSAGES: number = 100;
private MAX_MSECONDS_BATCH_MESSAGES: number = 5000;
private MAX_LENGTH_STRING_JSON: number = 1000;
private JSNLOG_URL: string = "/openvidu/elk/openvidu-browser-logs";
private MAX_JSNLOG_BATCH_LOG_MESSAGES: number = 100;
private MAX_MSECONDS_BATCH_MESSAGES: number = 5000;
private MAX_LENGTH_STRING_JSON: number = 1000;
private defaultConsoleLogger: ConsoleLogger = new ConsoleLogger(globalThis.console);
private defaultConsoleLogger: ConsoleLogger = new ConsoleLogger(globalThis.console);
private currentAppender: any;
private currentAppender: any;
private isProdMode = false;
private isJSNLogSetup = false;
private isProdMode = false;
private isJSNLogSetup = false;
// This two variables are used to restart JSNLog
// on different sessions and different userIds
private loggingSessionId: string | undefined;
// This two variables are used to restart JSNLog
// on different sessions and different userIds
private loggingSessionId: string | undefined;
/**
* @hidden
*/
static configureJSNLog(openVidu: OpenVidu, token: string) {
try {
// If dev mode or...
if (
globalThis['LOG_JSNLOG_RESULTS'] ||
// If instance is created and it is OpenVidu Pro
(this.instance &&
openVidu.isAtLeastPro &&
// If logs are enabled
this.instance.isOpenViduBrowserLogsDebugActive(openVidu) &&
// Only reconfigure it if session or finalUserId has changed
this.instance.canConfigureJSNLog(openVidu, this.instance))
) {
// Check if app logs can be sent
// and replace console.log function to send
// logs of the application
if (openVidu.sendBrowserLogs === OpenViduLoggerConfiguration.debug_app) {
this.instance.replaceWindowConsole();
}
/**
* @hidden
*/
static configureJSNLog(openVidu: OpenVidu, token: string) {
try {
// If dev mode or...
if ((globalThis['LOG_JSNLOG_RESULTS']) ||
// If instance is created and it is OpenVidu Pro
(this.instance && openVidu.isAtLeastPro
// If logs are enabled
&& this.instance.isOpenViduBrowserLogsDebugActive(openVidu)
// Only reconfigure it if session or finalUserId has changed
&& this.instance.canConfigureJSNLog(openVidu, this.instance))) {
// isJSNLogSetup will not be true until completed setup
this.instance.isJSNLogSetup = false;
this.instance.info('Configuring JSNLogs.');
// Check if app logs can be sent
// and replace console.log function to send
// logs of the application
if (openVidu.sendBrowserLogs === OpenViduLoggerConfiguration.debug_app) {
this.instance.replaceWindowConsole();
}
const finalUserId = openVidu.finalUserId;
const sessionId = openVidu.session.sessionId;
// isJSNLogSetup will not be true until completed setup
this.instance.isJSNLogSetup = false;
this.instance.info("Configuring JSNLogs.");
const beforeSendCallback = (xhr) => {
// If 401 or 403 or 404 modify ready and status so JSNLog don't retry to send logs
// https://github.com/mperdeck/jsnlog.js/blob/v2.30.0/jsnlog.ts#L805-L818
const parentReadyStateFunction = xhr.onreadystatechange;
xhr.onreadystatechange = () => {
if (this.isInvalidResponse(xhr)) {
Object.defineProperty(xhr, 'readyState', { value: 4 });
Object.defineProperty(xhr, 'status', { value: 200 });
// Disable JSNLog too to not send periodically errors
this.instance.disableLogger();
}
parentReadyStateFunction();
};
const finalUserId = openVidu.finalUserId;
const sessionId = openVidu.session.sessionId;
// Headers to identify and authenticate logs
xhr.setRequestHeader('Authorization', 'Basic ' + btoa(`${finalUserId}%/%${sessionId}` + ':' + token));
xhr.setRequestHeader('X-Requested-With', 'XMLHttpRequest');
// Additional headers for OpenVidu
xhr.setRequestHeader('OV-Final-User-Id', finalUserId);
xhr.setRequestHeader('OV-Session-Id', sessionId);
xhr.setRequestHeader('OV-Token', token);
};
const beforeSendCallback = (xhr) => {
// If 401 or 403 or 404 modify ready and status so JSNLog don't retry to send logs
// https://github.com/mperdeck/jsnlog.js/blob/v2.30.0/jsnlog.ts#L805-L818
const parentReadyStateFunction = xhr.onreadystatechange;
xhr.onreadystatechange = () => {
if (this.isInvalidResponse(xhr)) {
Object.defineProperty(xhr, "readyState", { value: 4 });
Object.defineProperty(xhr, "status", { value: 200 });
// Disable JSNLog too to not send periodically errors
this.instance.disableLogger();
}
parentReadyStateFunction();
}
// Creation of the appender.
this.instance.currentAppender = JL.createAjaxAppender(`appender-${finalUserId}-${sessionId}`);
this.instance.currentAppender.setOptions({
beforeSend: beforeSendCallback,
maxBatchSize: 1000,
batchSize: this.instance.MAX_JSNLOG_BATCH_LOG_MESSAGES,
batchTimeout: this.instance.MAX_MSECONDS_BATCH_MESSAGES
});
// Headers to identify and authenticate logs
xhr.setRequestHeader('Authorization', "Basic " + btoa(`${finalUserId}%/%${sessionId}` + ":" + token));
xhr.setRequestHeader('X-Requested-With', 'XMLHttpRequest')
// Additional headers for OpenVidu
xhr.setRequestHeader('OV-Final-User-Id', finalUserId);
xhr.setRequestHeader('OV-Session-Id', sessionId);
xhr.setRequestHeader('OV-Token', token);
}
// Avoid circular dependencies
const logSerializer = (obj): string => {
const getCircularReplacer = () => {
const seen = new WeakSet();
return (key, value) => {
if (typeof value === 'object' && value != null) {
if (seen.has(value) || (globalThis.HTMLElement && value instanceof HTMLElement)) {
return;
}
seen.add(value);
}
return value;
};
};
// Creation of the appender.
this.instance.currentAppender = JL.createAjaxAppender(`appender-${finalUserId}-${sessionId}`);
this.instance.currentAppender.setOptions({
beforeSend: beforeSendCallback,
maxBatchSize: 1000,
batchSize: this.instance.MAX_JSNLOG_BATCH_LOG_MESSAGES,
batchTimeout: this.instance.MAX_MSECONDS_BATCH_MESSAGES
});
// Cut long messages
let stringifyJson = JSON.stringify(obj, getCircularReplacer());
if (stringifyJson.length > this.instance.MAX_LENGTH_STRING_JSON) {
stringifyJson = `${stringifyJson.substring(0, this.instance.MAX_LENGTH_STRING_JSON)}...`;
}
// Avoid circular dependencies
const logSerializer = (obj): string => {
const getCircularReplacer = () => {
const seen = new WeakSet();
return (key, value) => {
if (typeof value === "object" && value != null) {
if (seen.has(value) || (globalThis.HTMLElement && value instanceof HTMLElement)) {
return;
}
seen.add(value);
}
return value;
};
};
if (globalThis['LOG_JSNLOG_RESULTS']) {
console.log(stringifyJson);
}
// Cut long messages
let stringifyJson = JSON.stringify(obj, getCircularReplacer());
if (stringifyJson.length > this.instance.MAX_LENGTH_STRING_JSON) {
stringifyJson = `${stringifyJson.substring(0, this.instance.MAX_LENGTH_STRING_JSON)}...`;
}
return stringifyJson;
};
if (globalThis['LOG_JSNLOG_RESULTS']) {
console.log(stringifyJson);
}
// Initialize JL to send logs
JL.setOptions({
defaultAjaxUrl: openVidu.httpUri + this.instance.JSNLOG_URL,
serialize: logSerializer,
enabled: true
});
JL().setOptions({
appenders: [this.instance.currentAppender]
});
return stringifyJson;
};
this.instance.isJSNLogSetup = true;
this.instance.loggingSessionId = sessionId;
this.instance.info('JSNLog configured.');
}
} catch (e) {
// Print error
console.error('Error configuring JSNLog: ');
console.error(e);
// Restore defaults values just in case any exception happen-
this.instance.disableLogger();
}
}
// Initialize JL to send logs
JL.setOptions({
defaultAjaxUrl: openVidu.httpUri + this.instance.JSNLOG_URL,
serialize: logSerializer,
enabled: true
});
JL().setOptions({
appenders: [this.instance.currentAppender]
});
/**
* @hidden
*/
static getInstance(): OpenViduLogger {
if (!OpenViduLogger.instance) {
OpenViduLogger.instance = new OpenViduLogger();
}
return OpenViduLogger.instance;
}
this.instance.isJSNLogSetup = true;
this.instance.loggingSessionId = sessionId;
this.instance.info("JSNLog configured.");
}
} catch (e) {
// Print error
console.error("Error configuring JSNLog: ");
console.error(e);
// Restore defaults values just in case any exception happen-
this.instance.disableLogger();
}
}
private static isInvalidResponse(xhr: XMLHttpRequest) {
return xhr.status == 401 || xhr.status == 403 || xhr.status == 404 || xhr.status == 0;
}
/**
* @hidden
*/
static getInstance(): OpenViduLogger {
if (!OpenViduLogger.instance) {
OpenViduLogger.instance = new OpenViduLogger();
}
return OpenViduLogger.instance;
}
private canConfigureJSNLog(openVidu: OpenVidu, logger: OpenViduLogger): boolean {
return openVidu.session.sessionId != logger.loggingSessionId;
}
private static isInvalidResponse(xhr: XMLHttpRequest) {
return xhr.status == 401 || xhr.status == 403 || xhr.status == 404 || xhr.status == 0;
}
private isOpenViduBrowserLogsDebugActive(openVidu: OpenVidu) {
return (
openVidu.sendBrowserLogs === OpenViduLoggerConfiguration.debug ||
openVidu.sendBrowserLogs === OpenViduLoggerConfiguration.debug_app
);
}
private canConfigureJSNLog(openVidu: OpenVidu, logger: OpenViduLogger): boolean {
return openVidu.session.sessionId != logger.loggingSessionId
}
// Return console functions with jsnlog integration
private getConsoleWithJSNLog() {
return (function (openViduLogger: OpenViduLogger) {
return {
log: function (...args) {
openViduLogger.defaultConsoleLogger.log.apply(openViduLogger.defaultConsoleLogger.logger, arguments);
if (openViduLogger.isJSNLogSetup) {
JL().info(arguments);
}
},
info: function (...args) {
openViduLogger.defaultConsoleLogger.info.apply(openViduLogger.defaultConsoleLogger.logger, arguments);
if (openViduLogger.isJSNLogSetup) {
JL().info(arguments);
}
},
debug: function (...args) {
openViduLogger.defaultConsoleLogger.debug.apply(openViduLogger.defaultConsoleLogger.logger, arguments);
},
warn: function (...args) {
openViduLogger.defaultConsoleLogger.warn.apply(openViduLogger.defaultConsoleLogger.logger, arguments);
if (openViduLogger.isJSNLogSetup) {
JL().warn(arguments);
}
},
error: function (...args) {
openViduLogger.defaultConsoleLogger.error.apply(openViduLogger.defaultConsoleLogger.logger, arguments);
if (openViduLogger.isJSNLogSetup) {
JL().error(arguments);
}
}
};
})(this);
}
private isOpenViduBrowserLogsDebugActive(openVidu: OpenVidu) {
return openVidu.sendBrowserLogs === OpenViduLoggerConfiguration.debug ||
openVidu.sendBrowserLogs === OpenViduLoggerConfiguration.debug_app;
}
private replaceWindowConsole() {
globalThis.console = this.defaultConsoleLogger.logger;
globalThis.console.log = this.getConsoleWithJSNLog().log;
globalThis.console.info = this.getConsoleWithJSNLog().info;
globalThis.console.debug = this.getConsoleWithJSNLog().debug;
globalThis.console.warn = this.getConsoleWithJSNLog().warn;
globalThis.console.error = this.getConsoleWithJSNLog().error;
}
// Return console functions with jsnlog integration
private getConsoleWithJSNLog() {
return function (openViduLogger: OpenViduLogger) {
return {
log: function (...args) {
openViduLogger.defaultConsoleLogger.log.apply(openViduLogger.defaultConsoleLogger.logger, arguments);
if (openViduLogger.isJSNLogSetup) {
JL().info(arguments);
}
},
info: function (...args) {
openViduLogger.defaultConsoleLogger.info.apply(openViduLogger.defaultConsoleLogger.logger, arguments);
if (openViduLogger.isJSNLogSetup) {
JL().info(arguments);
}
},
debug: function (...args) {
openViduLogger.defaultConsoleLogger.debug.apply(openViduLogger.defaultConsoleLogger.logger, arguments);
},
warn: function (...args) {
openViduLogger.defaultConsoleLogger.warn.apply(openViduLogger.defaultConsoleLogger.logger, arguments);
if (openViduLogger.isJSNLogSetup) {
JL().warn(arguments);
}
},
error: function (...args) {
openViduLogger.defaultConsoleLogger.error.apply(openViduLogger.defaultConsoleLogger.logger, arguments);
if (openViduLogger.isJSNLogSetup) {
JL().error(arguments);
}
}
};
}(this);
}
private disableLogger() {
JL.setOptions({ enabled: false });
this.isJSNLogSetup = false;
this.loggingSessionId = undefined;
this.currentAppender = undefined;
globalThis.console = this.defaultConsoleLogger.logger;
globalThis.console.log = this.defaultConsoleLogger.log;
globalThis.console.info = this.defaultConsoleLogger.info;
globalThis.console.debug = this.defaultConsoleLogger.debug;
globalThis.console.warn = this.defaultConsoleLogger.warn;
globalThis.console.error = this.defaultConsoleLogger.error;
}
private replaceWindowConsole() {
globalThis.console = this.defaultConsoleLogger.logger;
globalThis.console.log = this.getConsoleWithJSNLog().log;
globalThis.console.info = this.getConsoleWithJSNLog().info;
globalThis.console.debug = this.getConsoleWithJSNLog().debug;
globalThis.console.warn = this.getConsoleWithJSNLog().warn;
globalThis.console.error = this.getConsoleWithJSNLog().error;
}
/**
* @hidden
*/
log(...args: any[]) {
if (!this.isProdMode) {
this.defaultConsoleLogger.log.apply(this.defaultConsoleLogger.logger, arguments);
}
if (this.isJSNLogSetup) {
JL().info(arguments);
}
}
private disableLogger() {
JL.setOptions({ enabled: false });
this.isJSNLogSetup = false;
this.loggingSessionId = undefined;
this.currentAppender = undefined;
globalThis.console = this.defaultConsoleLogger.logger;
globalThis.console.log = this.defaultConsoleLogger.log;
globalThis.console.info = this.defaultConsoleLogger.info;
globalThis.console.debug = this.defaultConsoleLogger.debug;
globalThis.console.warn = this.defaultConsoleLogger.warn;
globalThis.console.error = this.defaultConsoleLogger.error;
}
/**
* @hidden
*/
debug(...args: any[]) {
if (!this.isProdMode) {
this.defaultConsoleLogger.debug.apply(this.defaultConsoleLogger.logger, arguments);
}
}
/**
* @hidden
*/
log(...args: any[]) {
if (!this.isProdMode) {
this.defaultConsoleLogger.log.apply(this.defaultConsoleLogger.logger, arguments);
}
if (this.isJSNLogSetup) {
JL().info(arguments);
}
}
/**
* @hidden
*/
info(...args: any[]) {
if (!this.isProdMode) {
this.defaultConsoleLogger.info.apply(this.defaultConsoleLogger.logger, arguments);
}
if (this.isJSNLogSetup) {
JL().info(arguments);
}
}
/**
* @hidden
*/
debug(...args: any[]) {
if (!this.isProdMode) {
this.defaultConsoleLogger.debug.apply(this.defaultConsoleLogger.logger, arguments);
}
}
/**
* @hidden
*/
warn(...args: any[]) {
this.defaultConsoleLogger.warn.apply(this.defaultConsoleLogger.logger, arguments);
if (this.isJSNLogSetup) {
JL().warn(arguments);
}
}
/**
* @hidden
*/
info(...args: any[]) {
if (!this.isProdMode) {
this.defaultConsoleLogger.info.apply(this.defaultConsoleLogger.logger, arguments);
}
if (this.isJSNLogSetup) {
JL().info(arguments);
}
}
/**
* @hidden
*/
error(...args: any[]) {
this.defaultConsoleLogger.error.apply(this.defaultConsoleLogger.logger, arguments);
if (this.isJSNLogSetup) {
JL().error(arguments);
}
}
/**
* @hidden
*/
warn(...args: any[]) {
this.defaultConsoleLogger.warn.apply(this.defaultConsoleLogger.logger, arguments);
if (this.isJSNLogSetup) {
JL().warn(arguments);
}
}
/**
* @hidden
*/
error(...args: any[]) {
this.defaultConsoleLogger.error.apply(this.defaultConsoleLogger.logger, arguments);
if (this.isJSNLogSetup) {
JL().error(arguments);
}
}
/**
* @hidden
*/
flush() {
if (this.isJSNLogSetup && this.currentAppender != null) {
this.currentAppender.sendBatch();
}
}
enableProdMode() {
this.isProdMode = true;
}
/**
* @hidden
*/
flush() {
if (this.isJSNLogSetup && this.currentAppender != null) {
this.currentAppender.sendBatch();
}
}
enableProdMode() {
this.isProdMode = true;
}
}

View File

@ -2,4 +2,4 @@ export enum OpenViduLoggerConfiguration {
disabled = 'disabled',
debug = 'debug',
debug_app = 'debug_app'
}
}

View File

@ -54,7 +54,11 @@ globalThis.getScreenId = function (firefoxString, callback, custom_parameter) {
if (event.data.chromeMediaSourceId === 'PermissionDeniedError') {
callback('permission-denied');
} else {
callback(null, event.data.chromeMediaSourceId, getScreenConstraints(null, event.data.chromeMediaSourceId, event.data.canRequestAudioTrack));
callback(
null,
event.data.chromeMediaSourceId,
getScreenConstraints(null, event.data.chromeMediaSourceId, event.data.canRequestAudioTrack)
);
}
// this event listener is no more needed
@ -71,8 +75,7 @@ globalThis.getScreenId = function (firefoxString, callback, custom_parameter) {
if (!custom_parameter) {
setTimeout(postGetSourceIdMessage, 100);
}
else {
} else {
setTimeout(function () {
postGetSourceIdMessage(custom_parameter);
}, 100);
@ -95,7 +98,7 @@ function getScreenConstraints(error, sourceId, canRequestAudioTrack) {
if (!!canRequestAudioTrack) {
screen_constraints.audio = {
mandatory: {
chromeMediaSource: error ? 'screen' : 'desktop',
chromeMediaSource: error ? 'screen' : 'desktop'
// echoCancellation: true
},
optional: []
@ -129,19 +132,26 @@ function postGetSourceIdMessage(custom_parameter) {
}
if (!custom_parameter) {
iframe.contentWindow.postMessage({
captureSourceId: true
}, '*');
}
else if (!!custom_parameter.forEach) {
iframe.contentWindow.postMessage({
captureCustomSourceId: custom_parameter
}, '*');
}
else {
iframe.contentWindow.postMessage({
captureSourceIdWithAudio: true
}, '*');
iframe.contentWindow.postMessage(
{
captureSourceId: true
},
'*'
);
} else if (!!custom_parameter.forEach) {
iframe.contentWindow.postMessage(
{
captureCustomSourceId: custom_parameter
},
'*'
);
} else {
iframe.contentWindow.postMessage(
{
captureSourceIdWithAudio: true
},
'*'
);
}
}
@ -212,9 +222,12 @@ function postGetChromeExtensionStatusMessage() {
return;
}
iframe.contentWindow.postMessage({
getChromeExtensionStatus: true
}, '*');
iframe.contentWindow.postMessage(
{
getChromeExtensionStatus: true
},
'*'
);
}
exports.getScreenId = globalThis.getScreenId;

View File

@ -3,7 +3,7 @@ var chromeMediaSource = 'screen';
var sourceId;
var screenCallback;
if(typeof window !== 'undefined' && typeof navigator !== 'undefined' && typeof navigator.userAgent !== 'undefined'){
if (typeof window !== 'undefined' && typeof navigator !== 'undefined' && typeof navigator.userAgent !== 'undefined') {
var isFirefox = typeof window.InstallTrigger !== 'undefined';
var isOpera = !!window.opera || navigator.userAgent.indexOf(' OPR/') >= 0;
var isChrome = !!window.chrome && !isOpera;
@ -20,10 +20,8 @@ if(typeof window !== 'undefined' && typeof navigator !== 'undefined' && typeof n
function onMessageCallback(data) {
// "cancel" button is clicked
if (data == 'PermissionDeniedError') {
if (screenCallback)
return screenCallback('PermissionDeniedError');
else
throw new Error('PermissionDeniedError');
if (screenCallback) return screenCallback('PermissionDeniedError');
else throw new Error('PermissionDeniedError');
}
// extension notified his presence
if (data == 'rtcmulticonnection-extension-loaded') {
@ -31,7 +29,7 @@ function onMessageCallback(data) {
}
// extension shared temp sourceId
if (data.sourceId && screenCallback) {
screenCallback(sourceId = data.sourceId, data.canRequestAudioTrack === true);
screenCallback((sourceId = data.sourceId), data.canRequestAudioTrack === true);
}
}
@ -51,10 +49,8 @@ function isChromeExtensionAvailable(callback) {
// this function can be used to get "source-id" from the extension
function getSourceId(callback) {
if (!callback)
throw '"callback" parameter is mandatory.';
if (sourceId)
return callback(sourceId);
if (!callback) throw '"callback" parameter is mandatory.';
if (sourceId) return callback(sourceId);
screenCallback = callback;
window.postMessage('get-sourceId', '*');
}
@ -67,9 +63,12 @@ function getCustomSourceId(arr, callback) {
if (sourceId) return callback(sourceId);
screenCallback = callback;
window.postMessage({
'get-custom-sourceId': arr
}, '*');
window.postMessage(
{
'get-custom-sourceId': arr
},
'*'
);
}
// this function can be used to get "source-id" from the extension
@ -82,8 +81,7 @@ function getSourceIdWithAudio(callback) {
}
function getChromeExtensionStatus(extensionid, callback) {
if (isFirefox)
return callback('not-chrome');
if (isFirefox) return callback('not-chrome');
if (arguments.length != 2) {
callback = extensionid;
extensionid = 'lfcgfepafnobdloecchnfaclibenjold'; // default extension-id
@ -96,8 +94,7 @@ function getChromeExtensionStatus(extensionid, callback) {
setTimeout(function () {
if (chromeMediaSource == 'screen') {
callback('installed-disabled');
} else
callback('installed-enabled');
} else callback('installed-enabled');
}, 2000);
};
image.onerror = function () {
@ -116,8 +113,7 @@ function getScreenConstraints(callback, captureSourceIdWithAudio) {
mozMediaSource: 'window',
mediaSource: 'window'
};
if (isFirefox)
return callback(null, firefoxScreenConstraints);
if (isFirefox) return callback(null, firefoxScreenConstraints);
// this statement defines getUserMedia constraints
// that will be used to capture content of screen
var screen_constraints = {
@ -141,8 +137,7 @@ function getScreenConstraints(callback, captureSourceIdWithAudio) {
}
callback(sourceId == 'PermissionDeniedError' ? sourceId : null, screen_constraints);
});
}
else {
} else {
getSourceId(function (sourceId) {
screen_constraints.mandatory.chromeMediaSourceId = sourceId;
callback(sourceId == 'PermissionDeniedError' ? sourceId : null, screen_constraints);
@ -164,4 +159,4 @@ exports.getScreenConstraints = getScreenConstraints;
exports.getScreenConstraintsWithAudio = getScreenConstraintsWithAudio;
exports.isChromeExtensionAvailable = isChromeExtensionAvailable;
exports.getChromeExtensionStatus = getChromeExtensionStatus;
exports.getSourceId = getSourceId;
exports.getSourceId = getSourceId;

View File

@ -1,222 +1,221 @@
import platform = require('platform');
export class PlatformUtils {
protected static instance: PlatformUtils;
constructor() { }
protected static instance: PlatformUtils;
constructor() {}
static getInstance(): PlatformUtils {
if (!this.instance) {
this.instance = new PlatformUtils();
}
return PlatformUtils.instance;
}
static getInstance(): PlatformUtils {
if (!this.instance) {
this.instance = new PlatformUtils();
}
return PlatformUtils.instance;
}
public isChromeBrowser(): boolean {
return platform.name === "Chrome";
}
public isChromeBrowser(): boolean {
return platform.name === 'Chrome';
}
/**
* @hidden
*/
public isSafariBrowser(): boolean {
return platform.name === "Safari";
}
/**
* @hidden
*/
public isSafariBrowser(): boolean {
return platform.name === 'Safari';
}
/**
* @hidden
*/
public isChromeMobileBrowser(): boolean {
return platform.name === "Chrome Mobile";
}
/**
* @hidden
*/
public isChromeMobileBrowser(): boolean {
return platform.name === 'Chrome Mobile';
}
/**
* @hidden
*/
public isFirefoxBrowser(): boolean {
return platform.name === "Firefox";
}
/**
* @hidden
*/
public isFirefoxBrowser(): boolean {
return platform.name === 'Firefox';
}
/**
* @hidden
*/
public isFirefoxMobileBrowser(): boolean {
return platform.name === "Firefox Mobile" || platform.name === "Firefox for iOS";
}
/**
* @hidden
*/
public isFirefoxMobileBrowser(): boolean {
return platform.name === 'Firefox Mobile' || platform.name === 'Firefox for iOS';
}
/**
* @hidden
*/
public isOperaBrowser(): boolean {
return platform.name === "Opera";
}
/**
* @hidden
*/
public isOperaBrowser(): boolean {
return platform.name === 'Opera';
}
/**
* @hidden
*/
public isOperaMobileBrowser(): boolean {
return platform.name === "Opera Mobile";
}
/**
* @hidden
*/
public isOperaMobileBrowser(): boolean {
return platform.name === 'Opera Mobile';
}
/**
* @hidden
*/
public isEdgeBrowser(): boolean {
const version = platform?.version ? parseFloat(platform.version) : -1;
return platform.name === "Microsoft Edge" && version >= 80;
}
/**
* @hidden
*/
public isEdgeBrowser(): boolean {
const version = platform?.version ? parseFloat(platform.version) : -1;
return platform.name === 'Microsoft Edge' && version >= 80;
}
/**
* @hidden
*/
public isEdgeMobileBrowser(): boolean {
const version = platform?.version ? parseFloat(platform.version) : -1;
return platform.name === "Microsoft Edge" && (platform.os?.family === 'Android' || platform.os?.family === 'iOS') && version > 45;
}
/**
* @hidden
*/
public isEdgeMobileBrowser(): boolean {
const version = platform?.version ? parseFloat(platform.version) : -1;
return platform.name === 'Microsoft Edge' && (platform.os?.family === 'Android' || platform.os?.family === 'iOS') && version > 45;
}
/**
* @hidden
*/
public isAndroidBrowser(): boolean {
return platform.name === "Android Browser";
}
/**
* @hidden
*/
public isAndroidBrowser(): boolean {
return platform.name === 'Android Browser';
}
/**
* @hidden
*/
public isElectron(): boolean {
return platform.name === "Electron";
}
/**
* @hidden
*/
public isElectron(): boolean {
return platform.name === 'Electron';
}
/**
* @hidden
*/
public isNodeJs(): boolean {
return platform.name === "Node.js";
}
/**
* @hidden
*/
public isNodeJs(): boolean {
return platform.name === 'Node.js';
}
/**
* @hidden
*/
public isSamsungBrowser(): boolean {
return (
platform.name === "Samsung Internet Mobile" ||
platform.name === "Samsung Internet"
);
}
/**
* @hidden
*/
public isSamsungBrowser(): boolean {
return platform.name === 'Samsung Internet Mobile' || platform.name === 'Samsung Internet';
}
/**
* @hidden
*/
public isIPhoneOrIPad(): boolean {
const userAgent = !!platform.ua ? platform.ua : navigator.userAgent;
const isTouchable = "ontouchend" in document;
const isIPad = /\b(\w*Macintosh\w*)\b/.test(userAgent) && isTouchable;
const isIPhone =
/\b(\w*iPhone\w*)\b/.test(userAgent) &&
/\b(\w*Mobile\w*)\b/.test(userAgent) &&
isTouchable;
return isIPad || isIPhone;
}
/**
* @hidden
*/
public isIPhoneOrIPad(): boolean {
const userAgent = !!platform.ua ? platform.ua : navigator.userAgent;
const isTouchable = 'ontouchend' in document;
const isIPad = /\b(\w*Macintosh\w*)\b/.test(userAgent) && isTouchable;
const isIPhone = /\b(\w*iPhone\w*)\b/.test(userAgent) && /\b(\w*Mobile\w*)\b/.test(userAgent) && isTouchable;
return isIPad || isIPhone;
}
/**
* @hidden
*/
public isIOSWithSafari(): boolean {
const userAgent = !!platform.ua ? platform.ua : navigator.userAgent;
return this.isIPhoneOrIPad() && (
/\b(\w*Apple\w*)\b/.test(navigator.vendor) &&
/\b(\w*Safari\w*)\b/.test(userAgent) &&
!/\b(\w*CriOS\w*)\b/.test(userAgent) &&
!/\b(\w*FxiOS\w*)\b/.test(userAgent)
);
}
/**
* @hidden
*/
public isIOSWithSafari(): boolean {
const userAgent = !!platform.ua ? platform.ua : navigator.userAgent;
return (
this.isIPhoneOrIPad() &&
/\b(\w*Apple\w*)\b/.test(navigator.vendor) &&
/\b(\w*Safari\w*)\b/.test(userAgent) &&
!/\b(\w*CriOS\w*)\b/.test(userAgent) &&
!/\b(\w*FxiOS\w*)\b/.test(userAgent)
);
}
/**
* @hidden
*/
public isIonicIos(): boolean {
return this.isIPhoneOrIPad() && platform.ua!!.indexOf("Safari") === -1;
}
/**
* @hidden
*/
public isIonicIos(): boolean {
return this.isIPhoneOrIPad() && platform.ua!!.indexOf('Safari') === -1;
}
/**
* @hidden
*/
public isIonicAndroid(): boolean {
return (
platform.os!!.family === "Android" && platform.name == "Android Browser"
);
}
/**
* @hidden
*/
public isIonicAndroid(): boolean {
return platform.os!!.family === 'Android' && platform.name == 'Android Browser';
}
/**
* @hidden
*/
public isMobileDevice(): boolean {
return platform.os!!.family === "iOS" || platform.os!!.family === "Android";
}
/**
* @hidden
*/
public isMobileDevice(): boolean {
return platform.os!!.family === 'iOS' || platform.os!!.family === 'Android';
}
/**
* @hidden
*/
public isReactNative(): boolean {
return false;
}
/**
* @hidden
*/
public isReactNative(): boolean {
return false;
}
/**
* @hidden
*/
public isChromium(): boolean {
return this.isChromeBrowser() || this.isChromeMobileBrowser() ||
this.isOperaBrowser() || this.isOperaMobileBrowser() ||
this.isEdgeBrowser() || this.isEdgeMobileBrowser() ||
this.isSamsungBrowser() ||
this.isIonicAndroid() || this.isIonicIos() ||
this.isElectron();
}
/**
* @hidden
*/
public isChromium(): boolean {
return (
this.isChromeBrowser() ||
this.isChromeMobileBrowser() ||
this.isOperaBrowser() ||
this.isOperaMobileBrowser() ||
this.isEdgeBrowser() ||
this.isEdgeMobileBrowser() ||
this.isSamsungBrowser() ||
this.isIonicAndroid() ||
this.isIonicIos() ||
this.isElectron()
);
}
/**
* @hidden
*/
public canScreenShare(): boolean {
const version = platform?.version ? parseFloat(platform.version) : -1;
// Reject mobile devices
if (this.isMobileDevice()) {
return false;
}
return (
this.isChromeBrowser() ||
this.isFirefoxBrowser() ||
this.isOperaBrowser() ||
this.isElectron() ||
this.isEdgeBrowser() ||
(this.isSafariBrowser() && version >= 13)
);
}
/**
 * Whether the platform supports screen sharing. Mobile devices are always
 * rejected; on desktop, Chrome, Firefox, Opera, Electron, Edge and
 * Safari version 13 or later are accepted.
 * @hidden
 */
public canScreenShare(): boolean {
    // No screen-sharing support on mobile devices.
    if (this.isMobileDevice()) {
        return false;
    }
    if (this.isChromeBrowser() || this.isFirefoxBrowser() || this.isOperaBrowser()) {
        return true;
    }
    if (this.isElectron() || this.isEdgeBrowser()) {
        return true;
    }
    // -1 when platform.js could not determine a version string.
    const browserVersion = platform?.version ? parseFloat(platform.version) : -1;
    return this.isSafariBrowser() && browserVersion >= 13;
}
/**
 * Name of the detected browser/runtime, or an empty string when unknown.
 * @hidden
 */
public getName(): string {
    // ?? is equivalent to || here: the only falsy string value is "",
    // for which both operators yield "".
    return platform.name ?? "";
}
/**
 * Name of the detected browser/runtime, or an empty string when unknown.
 * @hidden
 */
public getName(): string {
    // ?? is equivalent to || here: the only falsy string value is '',
    // for which both operators yield ''.
    return platform.name ?? '';
}
/**
 * Version string of the detected browser/runtime, or an empty string
 * when unknown.
 * @hidden
 */
public getVersion(): string {
    // ?? is equivalent to || here: the only falsy string value is "",
    // for which both operators yield "".
    return platform.version ?? "";
}
/**
 * Version string of the detected browser/runtime, or an empty string
 * when unknown.
 * @hidden
 */
public getVersion(): string {
    // ?? is equivalent to || here: the only falsy string value is '',
    // for which both operators yield ''.
    return platform.version ?? '';
}
/**
 * Operating system family reported by platform.js, or an empty string
 * when unknown.
 * @hidden
 */
public getFamily(): string {
    // Single "!" assertion instead of the redundant "!!" (erased at
    // runtime either way); ?? matches || for string | undefined values.
    return platform.os!.family ?? "";
}
/**
 * Operating system family reported by platform.js, or an empty string
 * when unknown.
 * @hidden
 */
public getFamily(): string {
    // Single '!' assertion instead of the redundant '!!' (erased at
    // runtime either way); ?? matches || for string | undefined values.
    return platform.os!.family ?? '';
}
/**
 * Human-readable platform description from platform.js, or an empty
 * string when unknown.
 * @hidden
 */
public getDescription(): string {
    // ?? is equivalent to || here: the only falsy string value is "",
    // for which both operators yield "".
    return platform.description ?? "";
}
/**
 * Human-readable platform description from platform.js, or an empty
 * string when unknown.
 * @hidden
 */
public getDescription(): string {
    // ?? is equivalent to || here: the only falsy string value is '',
    // for which both operators yield ''.
    return platform.description ?? '';
}
}

View File

@ -63,24 +63,17 @@ export class WebRtcPeer {
this.configuration = {
...configuration,
iceServers:
!!configuration.iceServers &&
configuration.iceServers.length > 0
? configuration.iceServers
: freeice(),
mediaStream:
configuration.mediaStream !== undefined
? configuration.mediaStream
: null,
mode: !!configuration.mode ? configuration.mode : "sendrecv",
id: !!configuration.id ? configuration.id : this.generateUniqueId(),
iceServers: !!configuration.iceServers && configuration.iceServers.length > 0 ? configuration.iceServers : freeice(),
mediaStream: configuration.mediaStream !== undefined ? configuration.mediaStream : null,
mode: !!configuration.mode ? configuration.mode : 'sendrecv',
id: !!configuration.id ? configuration.id : this.generateUniqueId()
};
// prettier-ignore
logger.debug(`[WebRtcPeer] configuration:\n${JSON.stringify(this.configuration, null, 2)}`);
this.pc = new RTCPeerConnection({ iceServers: this.configuration.iceServers });
this.pc.addEventListener("icecandidate", (event: RTCPeerConnectionIceEvent) => {
this.pc.addEventListener('icecandidate', (event: RTCPeerConnectionIceEvent) => {
if (event.candidate !== null) {
// `RTCPeerConnectionIceEvent.candidate` is supposed to be an RTCIceCandidate:
// https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnectioniceevent-candidate
@ -140,11 +133,11 @@ export class WebRtcPeer {
const hasVideo = this.configuration.mediaConstraints.video;
const options: RTCOfferOptions = {
offerToReceiveAudio: this.configuration.mode !== "sendonly" && hasAudio,
offerToReceiveVideo: this.configuration.mode !== "sendonly" && hasVideo,
offerToReceiveAudio: this.configuration.mode !== 'sendonly' && hasAudio,
offerToReceiveVideo: this.configuration.mode !== 'sendonly' && hasVideo
};
logger.debug("[createOfferLegacy] RTCPeerConnection.createOffer() options:", JSON.stringify(options));
logger.debug('[createOfferLegacy] RTCPeerConnection.createOffer() options:', JSON.stringify(options));
return this.pc.createOffer(options);
}
@ -156,18 +149,18 @@ export class WebRtcPeer {
async createOffer(): Promise<RTCSessionDescriptionInit> {
// TODO: Delete this conditional when all supported browsers are
// modern enough to implement the Transceiver methods.
if (!("addTransceiver" in this.pc)) {
if (!('addTransceiver' in this.pc)) {
logger.warn(
"[createOffer] Method RTCPeerConnection.addTransceiver() is NOT available; using LEGACY offerToReceive{Audio,Video}"
'[createOffer] Method RTCPeerConnection.addTransceiver() is NOT available; using LEGACY offerToReceive{Audio,Video}'
);
return this.createOfferLegacy();
} else {
logger.debug("[createOffer] Method RTCPeerConnection.addTransceiver() is available; using it");
logger.debug('[createOffer] Method RTCPeerConnection.addTransceiver() is available; using it');
}
// Spec doc: https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-addtransceiver
if (this.configuration.mode !== "recvonly") {
if (this.configuration.mode !== 'recvonly') {
// To send media, assume that all desired media tracks have been
// already added by higher level code to our MediaStream.
@ -180,24 +173,18 @@ export class WebRtcPeer {
for (const track of this.configuration.mediaStream.getTracks()) {
const tcInit: RTCRtpTransceiverInit = {
direction: this.configuration.mode,
streams: [this.configuration.mediaStream],
streams: [this.configuration.mediaStream]
};
if (track.kind === "video" && this.configuration.simulcast) {
if (track.kind === 'video' && this.configuration.simulcast) {
// Check if the requested size is enough to ask for 3 layers.
const trackSettings = track.getSettings();
const trackConsts = track.getConstraints();
const trackWidth: number =
trackSettings.width ??
(trackConsts.width as ConstrainULongRange).ideal ??
(trackConsts.width as number) ??
0;
trackSettings.width ?? (trackConsts.width as ConstrainULongRange).ideal ?? (trackConsts.width as number) ?? 0;
const trackHeight: number =
trackSettings.height ??
(trackConsts.height as ConstrainULongRange).ideal ??
(trackConsts.height as number) ??
0;
trackSettings.height ?? (trackConsts.height as ConstrainULongRange).ideal ?? (trackConsts.height as number) ?? 0;
logger.info(`[createOffer] Video track dimensions: ${trackWidth}x${trackHeight}`);
const trackPixels = trackWidth * trackHeight;
@ -215,13 +202,13 @@ export class WebRtcPeer {
const layerDiv = 2 ** (maxLayers - l - 1);
const encoding: RTCRtpEncodingParameters = {
rid: "rdiv" + layerDiv.toString(),
rid: 'rdiv' + layerDiv.toString(),
// @ts-ignore -- Property missing from DOM types.
scalabilityMode: "L1T1",
scalabilityMode: 'L1T1'
};
if (["detail", "text"].includes(track.contentHint)) {
if (['detail', 'text'].includes(track.contentHint)) {
// Prioritize best resolution, for maximum picture detail.
encoding.scaleResolutionDownBy = 1.0;
@ -237,22 +224,20 @@ export class WebRtcPeer {
const tc = this.pc.addTransceiver(track, tcInit);
if (track.kind === "video") {
if (track.kind === 'video') {
let sendParams = tc.sender.getParameters();
let needSetParams = false;
if (!sendParams.degradationPreference?.length) {
// degradationPreference for video: "balanced", "maintain-framerate", "maintain-resolution".
// https://www.w3.org/TR/2018/CR-webrtc-20180927/#dom-rtcdegradationpreference
if (["detail", "text"].includes(track.contentHint)) {
sendParams.degradationPreference = "maintain-resolution";
if (['detail', 'text'].includes(track.contentHint)) {
sendParams.degradationPreference = 'maintain-resolution';
} else {
sendParams.degradationPreference = "balanced";
sendParams.degradationPreference = 'balanced';
}
logger.info(
`[createOffer] Video sender Degradation Preference set: ${sendParams.degradationPreference}`
);
logger.info(`[createOffer] Video sender Degradation Preference set: ${sendParams.degradationPreference}`);
// FIXME: Firefox implements degradationPreference on each individual encoding!
// (set it on every element of the sendParams.encodings array)
@ -310,7 +295,7 @@ export class WebRtcPeer {
}
} else {
// To just receive media, create new recvonly transceivers.
for (const kind of ["audio", "video"]) {
for (const kind of ['audio', 'video']) {
// Check if the media kind should be used.
if (!this.configuration.mediaConstraints[kind]) {
continue;
@ -319,7 +304,7 @@ export class WebRtcPeer {
this.configuration.mediaStream = new MediaStream();
this.pc.addTransceiver(kind, {
direction: this.configuration.mode,
streams: [this.configuration.mediaStream],
streams: [this.configuration.mediaStream]
});
}
}
@ -352,23 +337,21 @@ export class WebRtcPeer {
return new Promise((resolve, reject) => {
// TODO: Delete this conditional when all supported browsers are
// modern enough to implement the Transceiver methods.
if ("getTransceivers" in this.pc) {
logger.debug("[createAnswer] Method RTCPeerConnection.getTransceivers() is available; using it");
if ('getTransceivers' in this.pc) {
logger.debug('[createAnswer] Method RTCPeerConnection.getTransceivers() is available; using it');
// Ensure that the PeerConnection already contains one Transceiver
// for each kind of media.
// The Transceivers should have been already created internally by
// the PC itself, when `pc.setRemoteDescription(sdpOffer)` was called.
for (const kind of ["audio", "video"]) {
for (const kind of ['audio', 'video']) {
// Check if the media kind should be used.
if (!this.configuration.mediaConstraints[kind]) {
continue;
}
let tc = this.pc
.getTransceivers()
.find((tc) => tc.receiver.track.kind === kind);
let tc = this.pc.getTransceivers().find((tc) => tc.receiver.track.kind === kind);
if (tc) {
// Enforce our desired direction.
@ -382,27 +365,25 @@ export class WebRtcPeer {
.createAnswer()
.then((sdpAnswer) => resolve(sdpAnswer))
.catch((error) => reject(error));
} else {
// TODO: Delete else branch when all supported browsers are
// modern enough to implement the Transceiver methods
let offerAudio, offerVideo = true;
let offerAudio,
offerVideo = true;
if (!!this.configuration.mediaConstraints) {
offerAudio = (typeof this.configuration.mediaConstraints.audio === 'boolean') ?
this.configuration.mediaConstraints.audio : true;
offerVideo = (typeof this.configuration.mediaConstraints.video === 'boolean') ?
this.configuration.mediaConstraints.video : true;
offerAudio =
typeof this.configuration.mediaConstraints.audio === 'boolean' ? this.configuration.mediaConstraints.audio : true;
offerVideo =
typeof this.configuration.mediaConstraints.video === 'boolean' ? this.configuration.mediaConstraints.video : true;
const constraints: RTCOfferOptions = {
offerToReceiveAudio: offerAudio,
offerToReceiveVideo: offerVideo
};
this.pc!.createAnswer(constraints)
.then(sdpAnswer => resolve(sdpAnswer))
.catch(error => reject(error));
.then((sdpAnswer) => resolve(sdpAnswer))
.catch((error) => reject(error));
}
}
// else, there is nothing to do; the legacy createAnswer() options do
@ -415,7 +396,8 @@ export class WebRtcPeer {
*/
processLocalOffer(offer: RTCSessionDescriptionInit): Promise<void> {
return new Promise((resolve, reject) => {
this.pc.setLocalDescription(offer)
this.pc
.setLocalDescription(offer)
.then(() => {
const localDescription = this.pc.localDescription;
if (!!localDescription) {
@ -425,7 +407,7 @@ export class WebRtcPeer {
return reject('Local description is not defined');
}
})
.catch(error => reject(error));
.catch((error) => reject(error));
});
}
@ -445,7 +427,7 @@ export class WebRtcPeer {
}
this.setRemoteDescription(offer)
.then(() => resolve())
.catch(error => reject(error));
.catch((error) => reject(error));
});
}
@ -458,9 +440,10 @@ export class WebRtcPeer {
if (this.pc.signalingState === 'closed') {
return reject('RTCPeerConnection is closed when trying to set local description');
}
this.pc.setLocalDescription(answer)
this.pc
.setLocalDescription(answer)
.then(() => resolve())
.catch(error => reject(error));
.catch((error) => reject(error));
});
}
@ -513,7 +496,10 @@ export class WebRtcPeer {
break;
case 'stable':
if (!!this.pc.remoteDescription) {
this.pc.addIceCandidate(iceCandidate).then(() => resolve()).catch(error => reject(error));
this.pc
.addIceCandidate(iceCandidate)
.then(() => resolve())
.catch((error) => reject(error));
} else {
this.iceCandidateList.push(iceCandidate);
resolve();
@ -532,7 +518,12 @@ export class WebRtcPeer {
switch (iceConnectionState) {
case 'disconnected':
// Possible network disconnection
const msg1 = 'IceConnectionState of RTCPeerConnection ' + this.configuration.id + ' (' + otherId + ') change to "disconnected". Possible network disconnection';
const msg1 =
'IceConnectionState of RTCPeerConnection ' +
this.configuration.id +
' (' +
otherId +
') change to "disconnected". Possible network disconnection';
logger.warn(msg1);
this.configuration.onIceConnectionStateException(ExceptionEventName.ICE_CONNECTION_DISCONNECTED, msg1);
break;
@ -542,19 +533,27 @@ export class WebRtcPeer {
this.configuration.onIceConnectionStateException(ExceptionEventName.ICE_CONNECTION_FAILED, msg2);
break;
case 'closed':
logger.log('IceConnectionState of RTCPeerConnection ' + this.configuration.id + ' (' + otherId + ') change to "closed"');
logger.log(
'IceConnectionState of RTCPeerConnection ' + this.configuration.id + ' (' + otherId + ') change to "closed"'
);
break;
case 'new':
logger.log('IceConnectionState of RTCPeerConnection ' + this.configuration.id + ' (' + otherId + ') change to "new"');
break;
case 'checking':
logger.log('IceConnectionState of RTCPeerConnection ' + this.configuration.id + ' (' + otherId + ') change to "checking"');
logger.log(
'IceConnectionState of RTCPeerConnection ' + this.configuration.id + ' (' + otherId + ') change to "checking"'
);
break;
case 'connected':
logger.log('IceConnectionState of RTCPeerConnection ' + this.configuration.id + ' (' + otherId + ') change to "connected"');
logger.log(
'IceConnectionState of RTCPeerConnection ' + this.configuration.id + ' (' + otherId + ') change to "connected"'
);
break;
case 'completed':
logger.log('IceConnectionState of RTCPeerConnection ' + this.configuration.id + ' (' + otherId + ') change to "completed"');
logger.log(
'IceConnectionState of RTCPeerConnection ' + this.configuration.id + ' (' + otherId + ') change to "completed"'
);
break;
}
});
@ -566,10 +565,8 @@ export class WebRtcPeer {
generateUniqueId(): string {
return uuidv4();
}
}
export class WebRtcPeerRecvonly extends WebRtcPeer {
constructor(configuration: WebRtcPeerConfiguration) {
configuration.mode = 'recvonly';

View File

@ -30,18 +30,18 @@ const logger: OpenViduLogger = OpenViduLogger.getInstance();
let platform: PlatformUtils;
interface WebrtcStatsConfig {
interval: number,
httpEndpoint: string
interval: number;
httpEndpoint: string;
}
interface JSONStatsResponse {
'@timestamp': string,
participant_id: string,
session_id: string,
platform: string,
platform_description: string,
stream: string,
webrtc_stats: IWebrtcStats
'@timestamp': string;
participant_id: string;
session_id: string;
platform: string;
platform_description: string;
stream: string;
webrtc_stats: IWebrtcStats;
}
/**
@ -49,55 +49,62 @@ interface JSONStatsResponse {
*/
interface IWebrtcStats {
inbound?: {
audio: {
bytesReceived: number,
packetsReceived: number,
packetsLost: number,
jitter: number
} | {},
video: {
bytesReceived: number,
packetsReceived: number,
packetsLost: number,
jitter?: number, // Firefox
jitterBufferDelay?: number, // Chrome
framesDecoded: number,
firCount: number,
nackCount: number,
pliCount: number,
frameHeight?: number, // Chrome
frameWidth?: number, // Chrome
framesDropped?: number, // Chrome
framesReceived?: number // Chrome
} | {}
},
audio:
| {
bytesReceived: number;
packetsReceived: number;
packetsLost: number;
jitter: number;
}
| {};
video:
| {
bytesReceived: number;
packetsReceived: number;
packetsLost: number;
jitter?: number; // Firefox
jitterBufferDelay?: number; // Chrome
framesDecoded: number;
firCount: number;
nackCount: number;
pliCount: number;
frameHeight?: number; // Chrome
frameWidth?: number; // Chrome
framesDropped?: number; // Chrome
framesReceived?: number; // Chrome
}
| {};
};
outbound?: {
audio: {
bytesSent: number,
packetsSent: number,
} | {},
video: {
bytesSent: number,
packetsSent: number,
firCount: number,
framesEncoded: number,
nackCount: number,
pliCount: number,
qpSum: number,
frameHeight?: number, // Chrome
frameWidth?: number, // Chrome
framesSent?: number // Chrome
} | {}
},
audio:
| {
bytesSent: number;
packetsSent: number;
}
| {};
video:
| {
bytesSent: number;
packetsSent: number;
firCount: number;
framesEncoded: number;
nackCount: number;
pliCount: number;
qpSum: number;
frameHeight?: number; // Chrome
frameWidth?: number; // Chrome
framesSent?: number; // Chrome
}
| {};
};
candidatepair?: {
currentRoundTripTime?: number // Chrome
availableOutgoingBitrate?: number //Chrome
currentRoundTripTime?: number; // Chrome
availableOutgoingBitrate?: number; //Chrome
// availableIncomingBitrate?: number // No support for any browsers (https://developer.mozilla.org/en-US/docs/Web/API/RTCIceCandidatePairStats/availableIncomingBitrate)
}
};
};
}
export class WebRtcStats {
private readonly STATS_ITEM_NAME = 'webrtc-stats-config';
private webRtcStatsEnabled = false;
@ -114,23 +121,23 @@ export class WebRtcStats {
}
public initWebRtcStats(): void {
const webrtcObj = localStorage.getItem(this.STATS_ITEM_NAME);
if (!!webrtcObj) {
this.webRtcStatsEnabled = true;
const webrtcStatsConfig: WebrtcStatsConfig = JSON.parse(webrtcObj);
// webrtc object found in local storage
logger.warn('WebRtc stats enabled for stream ' + this.stream.streamId + ' of connection ' + this.stream.connection.connectionId);
logger.warn(
'WebRtc stats enabled for stream ' + this.stream.streamId + ' of connection ' + this.stream.connection.connectionId
);
logger.warn('localStorage item: ' + JSON.stringify(webrtcStatsConfig));
this.POST_URL = webrtcStatsConfig.httpEndpoint;
this.statsInterval = webrtcStatsConfig.interval; // Interval in seconds
this.statsInterval = webrtcStatsConfig.interval; // Interval in seconds
this.webRtcStatsIntervalId = setInterval(async () => {
await this.sendStatsToHttpEndpoint();
}, this.statsInterval * 1000);
} else {
logger.debug('WebRtc stats not enabled');
}
@ -206,7 +213,6 @@ export class WebRtcStats {
// - ¿React Native?
public getSelectedIceCandidateInfo(): Promise<any> {
return new Promise(async (resolve, reject) => {
const statsReport: any = await this.stream.getRTCPeerConnection().getStats();
let transportStat;
const candidatePairs: Map<string, any> = new Map();
@ -230,7 +236,7 @@ export class WebRtcStats {
});
let selectedCandidatePair;
if (transportStat != null) {
const selectedCandidatePairId = transportStat.selectedCandidatePairId
const selectedCandidatePairId = transportStat.selectedCandidatePairId;
selectedCandidatePair = candidatePairs.get(selectedCandidatePairId);
} else {
// This is basically Firefox
@ -250,9 +256,11 @@ export class WebRtcStats {
if (!!finalLocalCandidate) {
const candList = this.stream.getLocalIceCandidateList();
const cand = candList.filter((c: RTCIceCandidate) => {
return (!!c.candidate &&
return (
!!c.candidate &&
(c.candidate.indexOf(finalLocalCandidate.ip) >= 0 || c.candidate.indexOf(finalLocalCandidate.address) >= 0) &&
c.candidate.indexOf(finalLocalCandidate.port) >= 0);
c.candidate.indexOf(finalLocalCandidate.port) >= 0
);
});
finalLocalCandidate.raw = [];
for (let c of cand) {
@ -266,9 +274,11 @@ export class WebRtcStats {
if (!!finalRemoteCandidate) {
const candList = this.stream.getRemoteIceCandidateList();
const cand = candList.filter((c: RTCIceCandidate) => {
return (!!c.candidate &&
return (
!!c.candidate &&
(c.candidate.indexOf(finalRemoteCandidate.ip) >= 0 || c.candidate.indexOf(finalRemoteCandidate.address) >= 0) &&
c.candidate.indexOf(finalRemoteCandidate.port) >= 0);
c.candidate.indexOf(finalRemoteCandidate.port) >= 0
);
});
finalRemoteCandidate.raw = [];
for (let c of cand) {
@ -288,7 +298,9 @@ export class WebRtcStats {
public stopWebRtcStats() {
if (this.webRtcStatsEnabled) {
clearInterval(this.webRtcStatsIntervalId);
logger.warn('WebRtc stats stopped for disposed stream ' + this.stream.streamId + ' of connection ' + this.stream.connection.connectionId);
logger.warn(
'WebRtc stats stopped for disposed stream ' + this.stream.streamId + ' of connection ' + this.stream.connection.connectionId
);
}
}
@ -299,10 +311,9 @@ export class WebRtcStats {
'Content-type': 'application/json'
},
body: JSON.stringify(response),
method: 'POST',
method: 'POST'
};
await fetch(url, configuration);
} catch (error) {
logger.error(`sendStats error: ${JSON.stringify(error)}`);
}
@ -350,9 +361,7 @@ export class WebRtcStats {
// - ¿Ionic?
// - ¿React Native?
public async getCommonStats(): Promise<IWebrtcStats> {
return new Promise(async (resolve, reject) => {
try {
const statsReport: any = await this.stream.getRTCPeerConnection().getStats();
const response: IWebrtcStats = this.getWebRtcStatsResponseOutline();
@ -360,24 +369,23 @@ export class WebRtcStats {
const candidatePairStats = ['availableOutgoingBitrate', 'currentRoundTripTime'];
statsReport.forEach((stat: any) => {
let mediaType = stat.mediaType != null ? stat.mediaType : stat.kind;
const addStat = (direction: string, key: string): void => {
if (stat[key] != null && response[direction] != null) {
if (!mediaType && (videoTrackStats.indexOf(key) > -1)) {
if (!mediaType && videoTrackStats.indexOf(key) > -1) {
mediaType = 'video';
}
if (direction != null && mediaType != null && key != null && response[direction][mediaType] != null) {
response[direction][mediaType][key] = Number(stat[key]);
} else if(direction != null && key != null && candidatePairStats.includes(key)) {
} else if (direction != null && key != null && candidatePairStats.includes(key)) {
// candidate-pair-stats
response[direction][key] = Number(stat[key]);
}
}
}
};
switch (stat.type) {
case "outbound-rtp":
case 'outbound-rtp':
addStat('outbound', 'bytesSent');
addStat('outbound', 'packetsSent');
addStat('outbound', 'framesEncoded');
@ -386,7 +394,7 @@ export class WebRtcStats {
addStat('outbound', 'pliCount');
addStat('outbound', 'qpSum');
break;
case "inbound-rtp":
case 'inbound-rtp':
addStat('inbound', 'bytesReceived');
addStat('inbound', 'packetsReceived');
addStat('inbound', 'packetsLost');
@ -412,7 +420,7 @@ export class WebRtcStats {
});
// Delete candidatepair from response if null
if(!response?.candidatepair || Object.keys(<Object>response.candidatepair).length === 0){
if (!response?.candidatepair || Object.keys(<Object>response.candidatepair).length === 0) {
delete response.candidatepair;
}
@ -421,7 +429,6 @@ export class WebRtcStats {
logger.error('Error getting common stats: ', error);
return reject(error);
}
});
}
@ -455,5 +462,4 @@ export class WebRtcStats {
};
}
}
}
}

View File

@ -45,4 +45,4 @@ export { StreamManagerEventMap } from './OpenViduInternal/Events/EventMap/Stream
export { PublisherEventMap } from './OpenViduInternal/Events/EventMap/PublisherEventMap';
// Disable jsnlog when library is loaded
JL.setOptions({ enabled: false })
JL.setOptions({ enabled: false });