openvidu-testapp: full track-processors-js capabilities

pull/856/head
pabloFuente 2025-12-12 15:33:56 +01:00
parent d9ebae88fa
commit b469cf5455
13 changed files with 1364 additions and 678 deletions

File diff suppressed because it is too large

View File

@@ -22,6 +22,7 @@
"@angular/platform-browser-dynamic": "^20.3.3",
"@angular/router": "^20.3.3",
"@livekit/protocol": "^1.38.0",
"@livekit/track-processors": "0.7.0",
"buffer": "^6.0.3",
"crypto-browserify": "^3.12.1",
"json-stringify-safe": "5.0.1",
@@ -39,6 +40,7 @@
"@angular/build": "^20.3.4",
"@angular/cli": "~20.3.4",
"@angular/compiler-cli": "^20.3.3",
"@types/dom-mediacapture-transform": "^0.1.11",
"@types/events": "^3.0.3",
"@types/jasmine": "~5.1.8",
"@types/json-stringify-safe": "^5.0.3",
@@ -52,4 +54,4 @@
"karma-jasmine-html-reporter": "~2.1.0",
"typescript": "5.9.3"
}
}
}
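
For reference, a hedged sketch of what the new dependencies are for: the runtime package supplies the BackgroundProcessor API imported later in this diff, while the @types packages only add ambient browser typings (wired into the tsconfig changes at the end of this commit). Illustrative only, not code from the repository:

```typescript
// The runtime dependency provides BackgroundProcessor (and the BackgroundOptions
// type) imported by processor-dialog.component.ts further down in this diff.
import { BackgroundProcessor } from '@livekit/track-processors';

// The processors build on Insertable Streams, which is what the new
// @types/dom-mediacapture-transform dev dependency provides ambient typings for.
const insertableStreamsSupported = typeof MediaStreamTrackProcessor !== 'undefined';
console.log({ insertableStreamsSupported, hasBackgroundProcessor: typeof BackgroundProcessor === 'function' });
```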

View File

@@ -27,6 +27,7 @@ import { MatSlideToggleModule } from '@angular/material/slide-toggle';
import { MatBadgeModule } from '@angular/material/badge';
import { MatProgressSpinnerModule } from '@angular/material/progress-spinner';
import { MatRadioModule } from '@angular/material/radio';
import { MatSliderModule } from '@angular/material/slider';
import { TestScenariosComponent } from './components/test-scenarios/test-scenarios.component';
import { TestSessionsComponent } from './components/test-sessions/test-sessions.component';
@@ -46,6 +47,7 @@ import { CallbackPipe } from './pipes/callback.pipe';
import { AppRoutingModule } from './app.routing';
import { VideoResolutionComponent } from './components/dialogs/options-dialog/video-resolution/video-resolution.component';
import { InfoDialogComponent } from './components/dialogs/info-dialog/info-dialog.component';
import { ProcessorDialogComponent } from './components/dialogs/processor-dialog/processor-dialog.component';
@NgModule({ declarations: [
AppComponent,
@@ -64,6 +66,7 @@ import { InfoDialogComponent } from './components/dialogs/info-dialog/info-dialo
OptionsDialogComponent,
VideoResolutionComponent,
InfoDialogComponent,
ProcessorDialogComponent,
],
bootstrap: [AppComponent], imports: [FormsModule,
BrowserModule,
@@ -86,7 +89,8 @@ import { InfoDialogComponent } from './components/dialogs/info-dialog/info-dialo
MatChipsModule,
MatSlideToggleModule,
MatBadgeModule,
MatProgressSpinnerModule], providers: [
MatProgressSpinnerModule,
MatSliderModule], providers: [
TestFeedService,
{
provide: MAT_FORM_FIELD_DEFAULT_OPTIONS,

View File

@@ -0,0 +1,141 @@
.dialog-content-wrapper {
height: 70vh;
max-height: 80vh;
overflow: hidden !important;
display: flex;
flex-direction: column;
padding: 0 !important;
}
.processor-layout {
display: flex;
flex-direction: row;
gap: 20px;
height: 100%;
overflow: hidden;
padding: 24px; /* Standard dialog padding */
}
.video-column {
flex: 0 0 50%;
display: flex;
flex-direction: column;
align-items: center;
justify-content: flex-start;
border-right: 1px solid rgba(0, 0, 0, 0.12);
padding-right: 20px;
overflow-y: auto; /* Allow video column to scroll if video is huge, though unlikely */
}
.controls-column {
flex: 1;
display: flex;
flex-direction: column;
gap: 16px;
overflow-y: auto;
padding-right: 10px;
padding-left: 10px;
}
.video-preview {
width: 100%;
display: flex;
justify-content: center;
}
.video-container {
position: relative;
display: inline-block;
max-width: 100%;
}
.video-container video {
display: block;
max-width: 100%;
max-height: 60vh; /* Use viewport height unit */
border-radius: 4px;
background-color: black;
}
.sample-region-overlay {
position: absolute;
border: 2px solid red;
background-color: rgba(255, 0, 0, 0.2);
pointer-events: none;
z-index: 10;
}
.control-row {
display: flex;
align-items: center;
gap: 10px;
}
.control-row label {
font-weight: bold;
min-width: 120px;
}
.video-btn {
border: none;
background: transparent;
cursor: pointer;
padding: 0;
display: flex;
align-items: center;
justify-content: center;
}
.video-btn mat-icon {
font-size: 24px;
width: 24px;
height: 24px;
}
.processor-mode-option {
width: 100%;
}
.control-row-group {
display: flex;
align-items: center;
gap: 20px;
flex-wrap: wrap;
}
.control-item {
display: flex;
align-items: center;
gap: 8px;
}
.control-item label {
font-weight: bold;
white-space: nowrap;
}
.control-item mat-slider {
width: 100px;
}
.control-group-nowrap {
display: flex;
flex-wrap: nowrap;
gap: 10px;
width: 100%;
/* overflow-x: auto; Removed to prevent clipping of slider labels */
padding-bottom: 5px;
}
.control-group-nowrap .control-item {
flex: 0 0 auto;
}
.control-group-nowrap .control-item mat-slider {
width: 80px;
}
/* Adjust mat-form-field to look normal in dialog */
::ng-deep .processor-mode-option .mat-mdc-form-field-wrapper {
padding-bottom: 0;
}

View File

@@ -0,0 +1,212 @@
<h1 mat-dialog-title>Track Processor Settings</h1>
<div mat-dialog-content class="dialog-content-wrapper">
<div class="processor-layout">
<div class="video-column">
<div class="video-preview">
<div class="video-container">
<video #videoElement autoplay playsinline muted></video>
@if (data.videoTrack.processorEnabled && data.videoTrack.segmentationMethod === 'chroma' && data.videoTrack.chromaKey.autoDetect) {
<div class="sample-region-overlay"
[style.left.%]="data.videoTrack.chromaKey.sampleRegion.startX * 100"
[style.width.%]="(data.videoTrack.chromaKey.sampleRegion.endX - data.videoTrack.chromaKey.sampleRegion.startX) * 100"
[style.top.%]="data.videoTrack.chromaKey.sampleRegion.startY * 100"
[style.height.%]="(data.videoTrack.chromaKey.sampleRegion.endY - data.videoTrack.chromaKey.sampleRegion.startY) * 100">
</div>
}
</div>
</div>
</div>
<div class="controls-column">
<div class="control-row">
<label>Processor Status:</label>
<mat-slide-toggle [checked]="data.videoTrack.processorEnabled" (change)="toggleTrackProcessor()" matTooltip="Toggle track processor"></mat-slide-toggle>
</div>
@if (data.videoTrack.processorEnabled) {
<div class="control-row">
<label>Mode:</label>
<mat-form-field class="processor-mode-option" matTooltip="Set processor mode" matTooltipClass="custom-tooltip">
<mat-select [(value)]="data.videoTrack.mode" (selectionChange)="changeProcessorMode($event.value)">
<mat-option value="virtual-background">Virtual Background</mat-option>
<mat-option value="background-blur">Background Blur</mat-option>
<mat-option value="disabled">Disabled</mat-option>
</mat-select>
</mat-form-field>
</div>
@if (data.videoTrack.mode === 'virtual-background') {
<div class="control-row">
<label>Background Type:</label>
<mat-form-field class="processor-mode-option" matTooltip="Set background type" matTooltipClass="custom-tooltip">
<mat-select [(value)]="data.videoTrack.backgroundType" (selectionChange)="changeBackgroundType($event.value)">
<mat-option value="image">Image</mat-option>
<mat-option value="screen">Screen</mat-option>
</mat-select>
</mat-form-field>
</div>
<div class="control-row-group">
<div class="control-item">
<label>Tracking:</label>
<mat-slide-toggle [checked]="data.videoTrack.tracking" (change)="toggleProcessorTracking()" matTooltip="Toggle Tracking"></mat-slide-toggle>
</div>
<div class="control-item">
<label>Scale:</label>
<mat-slider min="0.1" max="1" step="0.1" discrete matTooltip="Change Scale">
<input matSliderThumb [value]="data.videoTrack.scale" (valueChange)="changeProcessorScale($event)">
</mat-slider>
</div>
<div class="control-item">
<label>H. Pos:</label>
<mat-slider min="-1" max="1" step="0.1" discrete matTooltip="Change Horizontal Position">
<input matSliderThumb [value]="data.videoTrack.horizontalPosition" (valueChange)="changeProcessorHorizontalPosition($event)">
</mat-slider>
</div>
<div class="control-item">
<button mat-stroked-button (click)="resetVirtualBackground()" matTooltip="Reset all virtual background defaults">Reset</button>
</div>
</div>
}
@if (data.videoTrack.mode === 'background-blur') {
<div class="control-row">
<label>Blur Radius:</label>
<mat-slider min="1" max="100" step="1" discrete matTooltip="Change Blur Radius">
<input matSliderThumb [value]="data.videoTrack.blurRadius" (valueChange)="changeProcessorBlurRadius($event)">
</mat-slider>
</div>
}
<mat-divider></mat-divider>
<div class="control-row">
<label>Segmentation:</label>
<mat-form-field class="processor-mode-option" matTooltip="Set segmentation method" matTooltipClass="custom-tooltip">
<mat-select [(value)]="data.videoTrack.segmentationMethod" (selectionChange)="changeSegmentationMethod($event.value)">
<mat-option value="mediapipe">MediaPipe</mat-option>
<mat-option value="chroma">Chroma Key</mat-option>
</mat-select>
</mat-form-field>
</div>
@if (data.videoTrack.segmentationMethod === 'mediapipe') {
<div class="control-row">
<div style="display: flex; align-items: center; min-width: 120px;">
<label style="min-width: auto; margin-right: 4px;">Model:</label>
<a href="https://ai.google.dev/edge/mediapipe/solutions/vision/image_segmenter#selfie-model" target="_blank" matTooltip="See available models at ai.google.dev" style="color: inherit; text-decoration: none; display: flex; cursor: pointer;">
<mat-icon style="font-size: 16px; width: 16px; height: 16px; line-height: 16px;">info</mat-icon>
</a>
</div>
<mat-form-field class="processor-mode-option" matTooltip="Set custom model asset path" matTooltipClass="custom-tooltip">
<mat-select [(value)]="data.videoTrack.modelAssetPath" (selectionChange)="changeModelAssetPath($event.value)">
<mat-option value="https://storage.googleapis.com/mediapipe-models/image_segmenter/selfie_segmenter/float16/latest/selfie_segmenter.tflite">SelfieSegmenter (square) 256 x 256</mat-option>
<mat-option value="https://storage.googleapis.com/mediapipe-models/image_segmenter/selfie_segmenter_landscape/float16/latest/selfie_segmenter_landscape.tflite">SelfieSegmenter (landscape) 144 x 256</mat-option>
</mat-select>
</mat-form-field>
</div>
}
@if (data.videoTrack.segmentationMethod === 'chroma') {
<div class="control-row-group">
<div class="control-item">
<label>Auto Detect:</label>
<mat-slide-toggle [(ngModel)]="data.videoTrack.chromaKey.autoDetect" (change)="updateChromaKey()" matTooltip="Auto Detect Chroma Key"></mat-slide-toggle>
</div>
@if (!data.videoTrack.chromaKey.autoDetect) {
<div class="control-row">
<label>HSV range:</label>
<div class="control-group-nowrap">
<div class="control-item">
<label style="min-width: auto">H:</label>
<mat-slider min="0" max="255" discrete matTooltip="Hue Range">
<input matSliderStartThumb [(ngModel)]="data.videoTrack.chromaKey.hueRange[0]" (valueChange)="updateChromaKey()">
<input matSliderEndThumb [(ngModel)]="data.videoTrack.chromaKey.hueRange[1]" (valueChange)="updateChromaKey()">
</mat-slider>
</div>
<div class="control-item">
<label style="min-width: auto">S:</label>
<mat-slider min="0" max="255" discrete matTooltip="Saturation Range">
<input matSliderStartThumb [(ngModel)]="data.videoTrack.chromaKey.saturationRange[0]" (valueChange)="updateChromaKey()">
<input matSliderEndThumb [(ngModel)]="data.videoTrack.chromaKey.saturationRange[1]" (valueChange)="updateChromaKey()">
</mat-slider>
</div>
<div class="control-item">
<label style="min-width: auto">V:</label>
<mat-slider min="0" max="255" discrete matTooltip="Value Range">
<input matSliderStartThumb [(ngModel)]="data.videoTrack.chromaKey.valueRange[0]" (valueChange)="updateChromaKey()">
<input matSliderEndThumb [(ngModel)]="data.videoTrack.chromaKey.valueRange[1]" (valueChange)="updateChromaKey()">
</mat-slider>
</div>
</div>
</div>
} @else {
<div class="control-row">
<label>Threshold:</label>
<div class="control-group-nowrap">
<div class="control-item">
<label style="min-width: auto">H:</label>
<mat-slider min="0" max="255" discrete matTooltip="Hue Threshold">
<input matSliderThumb [(ngModel)]="data.videoTrack.chromaKey.autoDetectThreshold[0]" (valueChange)="updateChromaKey()">
</mat-slider>
</div>
<div class="control-item">
<label style="min-width: auto">S:</label>
<mat-slider min="0" max="255" discrete matTooltip="Saturation Threshold">
<input matSliderThumb [(ngModel)]="data.videoTrack.chromaKey.autoDetectThreshold[1]" (valueChange)="updateChromaKey()">
</mat-slider>
</div>
<div class="control-item">
<label style="min-width: auto">V:</label>
<mat-slider min="0" max="255" discrete matTooltip="Value Threshold">
<input matSliderThumb [(ngModel)]="data.videoTrack.chromaKey.autoDetectThreshold[2]" (valueChange)="updateChromaKey()">
</mat-slider>
</div>
</div>
</div>
<div class="control-row">
<label>Sample Region:</label>
<div class="control-group-nowrap">
<div class="control-item">
<label style="min-width: auto">X:</label>
<mat-slider min="0" max="1" step="0.01" discrete matTooltip="Auto-detect Sample Region (X-axis)">
<input matSliderStartThumb [(ngModel)]="data.videoTrack.chromaKey.sampleRegion.startX" (valueChange)="updateChromaKey()">
<input matSliderEndThumb [(ngModel)]="data.videoTrack.chromaKey.sampleRegion.endX" (valueChange)="updateChromaKey()">
</mat-slider>
</div>
<div class="control-item">
<label style="min-width: auto">Y:</label>
<mat-slider min="0" max="1" step="0.01" discrete matTooltip="Auto-detect Sample Region (Y-axis)">
<input matSliderStartThumb [(ngModel)]="data.videoTrack.chromaKey.sampleRegion.startY" (valueChange)="updateChromaKey()">
<input matSliderEndThumb [(ngModel)]="data.videoTrack.chromaKey.sampleRegion.endY" (valueChange)="updateChromaKey()">
</mat-slider>
</div>
</div>
</div>
<div class="control-item">
<label>Frame Auto-Detect Interval:</label>
<mat-slider min="1" max="100" step="1" discrete matTooltip="Auto-detect Frame Interval">
<input matSliderThumb [(ngModel)]="data.videoTrack.chromaKey.autoDetectFrameInterval" (valueChange)="updateChromaKey()">
</mat-slider>
</div>
}
<div class="control-item">
<button mat-stroked-button (click)="resetAllChromaKeyDefaults()" matTooltip="Reset all chroma defaults">Reset</button>
</div>
</div>
}
}
</div>
</div>
</div>
<div mat-dialog-actions align="end">
<button mat-button (click)="close()">Close</button>
</div>

View File

@@ -0,0 +1,223 @@
import {
AfterViewInit,
Component,
ElementRef,
Inject,
OnDestroy,
ViewChild,
} from '@angular/core';
import { MAT_DIALOG_DATA, MatDialogRef } from '@angular/material/dialog';
import { VideoTrackComponent } from '../../video-track/video-track.component';
import { LocalVideoTrack } from 'livekit-client';
import {
BackgroundOptions,
BackgroundProcessor,
} from '@livekit/track-processors';
@Component({
selector: 'app-processor-dialog',
templateUrl: './processor-dialog.component.html',
styleUrls: ['./processor-dialog.component.css'],
standalone: false,
})
export class ProcessorDialogComponent implements AfterViewInit, OnDestroy {
@ViewChild('videoElement') videoElement: ElementRef<HTMLVideoElement>;
constructor(
public dialogRef: MatDialogRef<ProcessorDialogComponent>,
@Inject(MAT_DIALOG_DATA) public data: { videoTrack: VideoTrackComponent }
) {}
ngAfterViewInit() {
if (this.data.videoTrack._track) {
(this.data.videoTrack._track as LocalVideoTrack).attach(
this.videoElement.nativeElement
);
}
}
ngOnDestroy() {
if (this.data.videoTrack._track) {
(this.data.videoTrack._track as LocalVideoTrack).detach(
this.videoElement.nativeElement
);
}
}
close() {
this.dialogRef.close();
}
async toggleTrackProcessor() {
if (this.data.videoTrack.processorEnabled) {
await (this.data.videoTrack._track! as LocalVideoTrack).stopProcessor();
this.data.videoTrack.processorEnabled = false;
if (this.data.videoTrack.screenShareTrack) {
this.data.videoTrack.screenShareTrack.stop();
this.data.videoTrack.screenShareTrack = undefined;
}
} else {
const options = await this.getProcessorOptions();
this.data.videoTrack.processor = BackgroundProcessor(options);
await (this.data.videoTrack._track! as LocalVideoTrack).setProcessor(
this.data.videoTrack.processor
);
this.data.videoTrack.processorEnabled = true;
}
}
async changeProcessorMode(
mode: 'disabled' | 'background-blur' | 'virtual-background'
) {
this.data.videoTrack.mode = mode;
const options = await this.getProcessorOptions();
await this.data.videoTrack.processor.switchTo(options);
}
async changeBackgroundType(type: 'image' | 'screen') {
this.data.videoTrack.backgroundType = type;
const options = await this.getProcessorOptions();
await this.data.videoTrack.processor.switchTo(options);
}
private async getProcessorOptions(): Promise<any> {
const options: any = {
mode: this.data.videoTrack.mode,
tracking: this.data.videoTrack.tracking,
scale: this.data.videoTrack.scale,
horizontalPosition: this.data.videoTrack.horizontalPosition,
blurRadius: this.data.videoTrack.blurRadius,
segmentationMethod: this.data.videoTrack.segmentationMethod,
};
if (this.data.videoTrack.segmentationMethod === 'mediapipe' && this.data.videoTrack.modelAssetPath) {
options.assetPaths = { modelAssetPath: this.data.videoTrack.modelAssetPath };
}
if (this.data.videoTrack.segmentationMethod === 'chroma') {
options.chromaKey = { ...this.data.videoTrack.chromaKey };
}
if (this.data.videoTrack.mode === 'virtual-background') {
if (this.data.videoTrack.backgroundType === 'image') {
options.imagePath =
'https://raw.githubusercontent.com/OpenVidu/openvidu.io/refs/heads/main/docs/assets/images/advanced-features/recording1.png';
} else {
if (
!this.data.videoTrack.screenShareTrack ||
this.data.videoTrack.screenShareTrack.readyState === 'ended'
) {
const screenShareTracks =
await this.data.videoTrack.localParticipant?.createScreenTracks({
video: true,
audio: false,
});
if (screenShareTracks && screenShareTracks.length > 0) {
this.data.videoTrack.screenShareTrack =
screenShareTracks[0].mediaStreamTrack;
}
}
if (this.data.videoTrack.screenShareTrack) {
options.videoTrack = this.data.videoTrack.screenShareTrack;
}
}
}
return options;
}
async toggleProcessorTracking() {
this.data.videoTrack.tracking = !this.data.videoTrack.tracking;
await this.data.videoTrack.processor.updateTransformerOptions({
tracking: this.data.videoTrack.tracking,
} as BackgroundOptions);
}
async changeProcessorScale(scale: number) {
this.data.videoTrack.scale = scale;
await this.data.videoTrack.processor.updateTransformerOptions({
scale: this.data.videoTrack.scale,
} as BackgroundOptions);
}
async changeProcessorHorizontalPosition(horizontalPosition: number) {
this.data.videoTrack.horizontalPosition = horizontalPosition;
await this.data.videoTrack.processor.updateTransformerOptions({
horizontalPosition: this.data.videoTrack.horizontalPosition,
} as any);
}
async changeProcessorBlurRadius(blurRadius: number) {
this.data.videoTrack.blurRadius = blurRadius;
await this.data.videoTrack.processor.updateTransformerOptions({
blurRadius: this.data.videoTrack.blurRadius,
} as any);
}
async resetVirtualBackground() {
this.data.videoTrack.tracking = false;
this.data.videoTrack.scale = 1;
this.data.videoTrack.horizontalPosition = 0;
await this.data.videoTrack.processor.updateTransformerOptions({
tracking: this.data.videoTrack.tracking,
scale: this.data.videoTrack.scale,
horizontalPosition: this.data.videoTrack.horizontalPosition,
} as any);
}
async changeModelAssetPath(path: string) {
this.data.videoTrack.modelAssetPath = path;
if (this.data.videoTrack.processorEnabled) {
const options = await this.getProcessorOptions();
// Recreate processor because assetPaths is a constructor option
await (this.data.videoTrack._track! as LocalVideoTrack).stopProcessor();
this.data.videoTrack.processor = BackgroundProcessor(options);
await (this.data.videoTrack._track! as LocalVideoTrack).setProcessor(
this.data.videoTrack.processor
);
await this.data.videoTrack.processor.updateTransformerOptions(options);
}
}
async changeSegmentationMethod(method: 'mediapipe' | 'chroma') {
this.data.videoTrack.segmentationMethod = method;
const options = await this.getProcessorOptions();
// Recreate processor because segmentationMethod is a constructor option
await (this.data.videoTrack._track! as LocalVideoTrack).stopProcessor();
this.data.videoTrack.processor = BackgroundProcessor(options);
await (this.data.videoTrack._track! as LocalVideoTrack).setProcessor(
this.data.videoTrack.processor
);
}
async updateChromaKey() {
// Ensure numeric type for autoDetectFrameInterval
if (this.data.videoTrack.chromaKey.autoDetectFrameInterval) {
this.data.videoTrack.chromaKey.autoDetectFrameInterval = Number(
this.data.videoTrack.chromaKey.autoDetectFrameInterval
);
}
console.log(
'Updating Chroma Key options:',
JSON.stringify(this.data.videoTrack.chromaKey)
);
await this.data.videoTrack.processor.updateTransformerOptions({
chromaKey: { ...this.data.videoTrack.chromaKey },
} as any);
}
async resetAllChromaKeyDefaults() {
this.data.videoTrack.chromaKey.autoDetect = true;
this.data.videoTrack.chromaKey.autoDetectThreshold = [70, 70, 70];
this.data.videoTrack.chromaKey.sampleRegion = {
startX: 0.05,
endX: 0.2,
startY: 0.08,
endY: 0.25,
};
this.data.videoTrack.chromaKey.autoDetectFrameInterval = 30;
this.data.videoTrack.chromaKey.hueRange = [60, 130];
this.data.videoTrack.chromaKey.saturationRange = [50, 255];
this.data.videoTrack.chromaKey.valueRange = [50, 255];
await this.updateChromaKey();
}
}
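
For reference, a minimal sketch of the processor lifecycle this dialog drives, restricted to the calls used above (BackgroundProcessor, setProcessor, updateTransformerOptions, switchTo, stopProcessor). The option values and image URL are placeholders, and the options are typed loosely, just as getProcessorOptions() does:

```typescript
import { LocalVideoTrack } from 'livekit-client';
import { BackgroundProcessor } from '@livekit/track-processors';

async function processorLifecycleSketch(track: LocalVideoTrack): Promise<void> {
  // Constructor options, shaped like the object built by getProcessorOptions().
  const processor = BackgroundProcessor({
    mode: 'background-blur',
    blurRadius: 10,
    segmentationMethod: 'mediapipe',
  } as any);

  await track.setProcessor(processor);                                 // start processing the local track
  await processor.updateTransformerOptions({ blurRadius: 25 } as any); // tweak options at runtime
  await processor.switchTo({
    mode: 'virtual-background',
    imagePath: 'https://example.com/background.png',                   // placeholder image URL
  } as any);                                                           // change mode without recreating
  await track.stopProcessor();                                         // remove the processor again
}
```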

View File

@@ -72,13 +72,13 @@
@for (trackPublication of participant.audioTrackPublications| keyvalue; track trackPublication) {
<app-audio-track
[index]="index" [trackPublication]="trackPublication.value" [track]="trackPublication.value.audioTrack"
[localParticipant]="localParticipant" (newTrackEvent)="events.push($event)"></app-audio-track>
[localParticipant]="localParticipant" (newTrackEvent)="onTrackEvent($event)"></app-audio-track>
}
</div>
@for (trackPublication of participant.videoTrackPublications | keyvalue; track trackPublication) {
<app-video-track
[index]="index" [trackPublication]="trackPublication.value" [track]="trackPublication.value.videoTrack"
[localParticipant]="localParticipant" (newTrackEvent)="events.push($event)"></app-video-track>
[localParticipant]="localParticipant" (newTrackEvent)="onTrackEvent($event)"></app-video-track>
}
</div>
}

View File

@@ -1,4 +1,4 @@
import { Component, EventEmitter, Input, Output, Inject } from '@angular/core';
import { Component, EventEmitter, Input, Output, Inject, ChangeDetectorRef } from '@angular/core';
import { MatDialog } from '@angular/material/dialog';
import {
AudioCaptureOptions,
@@ -65,7 +65,8 @@ export class ParticipantComponent {
constructor(
private testFeedService: TestFeedService,
@Inject(MatDialog) private dialog: MatDialog
@Inject(MatDialog) private dialog: MatDialog,
private cdr: ChangeDetectorRef
) {}
ngOnInit() {
@@ -82,6 +83,11 @@ export class ParticipantComponent {
);
}
onTrackEvent(event: TestAppEvent) {
this.events.push(event);
this.cdr.detectChanges();
}
async addVideoTrack() {
const options =
this.createLocalTracksOptions.video === true

View File

@@ -48,7 +48,7 @@ export class TrackComponent {
private indexId: string;
private trackId: string;
protected _track: Track | undefined;
public _track: Track | undefined;
@ViewChild('mediaElement') protected elementRef: ElementRef;
trackSubscribed: boolean = true;

View File

@@ -15,6 +15,13 @@
aria-hidden="true">stop</mat-icon>
</button>
}
@if (localParticipant) {
<button (click)="openProcessorDialog()" class="video-btn toggle-track-processor" matTooltip="Open track processor settings"
matTooltipClass="custom-tooltip">
<mat-icon aria-label="Open track processor settings" class="mat-icon material-icons" role="img"
aria-hidden="true">auto_fix_high</mat-icon>
</button>
}
@if (!localParticipant) {
<button (click)="toggleEnableTrack()" class="video-btn toggle-video-enabled" matTooltip="Toggle track enabled"
matTooltipClass="custom-tooltip">

View File

@@ -9,6 +9,7 @@ import { TrackComponent } from '../track/track.component';
import { MatDialog } from '@angular/material/dialog';
import { TestFeedService } from 'src/app/services/test-feed.service';
import { InfoDialogComponent } from '../dialogs/info-dialog/info-dialog.component';
import { ProcessorDialogComponent } from '../dialogs/processor-dialog/processor-dialog.component';
@Component({
selector: 'app-video-track',
@@ -18,11 +19,34 @@ import { InfoDialogComponent } from '../dialogs/info-dialog/info-dialog.componen
})
export class VideoTrackComponent extends TrackComponent {
muteVideoIcon: string = 'videocam';
blurIcon: string = 'blur_on';
maxVideoQuality: string;
videoZoom = false;
// Processor state
processor: any;
mode: 'disabled' | 'background-blur' | 'virtual-background' | undefined =
'virtual-background';
backgroundType: 'image' | 'screen' = 'image';
tracking: boolean = false;
scale: number = 1;
horizontalPosition: number = 0;
blurRadius: number = 10;
processorEnabled: boolean = false;
screenShareTrack: MediaStreamTrack | undefined;
segmentationMethod: 'mediapipe' | 'chroma' = 'mediapipe';
modelAssetPath: string = 'https://storage.googleapis.com/mediapipe-models/image_segmenter/selfie_segmenter/float16/latest/selfie_segmenter.tflite';
chromaKey = {
autoDetect: true,
autoDetectThreshold: [70, 70, 70] as [number, number, number],
hueRange: [60, 130] as [number, number],
saturationRange: [50, 255] as [number, number],
valueRange: [50, 255] as [number, number],
sampleRegion: { startX: 0.05, endX: 0.2, startY: 0.08, endY: 0.25 },
autoDetectFrameInterval: 30
};
constructor(
protected override testFeedService: TestFeedService,
@Inject(MatDialog) private dialog: MatDialog
@@ -101,14 +125,15 @@ export class VideoTrackComponent extends TrackComponent {
});
}
async blur() {
if (this.blurIcon == 'blur_on') {
// await (this._track! as LocalVideoTrack).setProcessor(BackgroundBlur());
this.blurIcon = 'blur_off';
} else {
// await (this._track! as LocalVideoTrack).stopProcessor();
this.blurIcon = 'blur_on';
}
openProcessorDialog() {
this.dialog.open(ProcessorDialogComponent, {
data: {
videoTrack: this,
},
width: '1400px',
maxWidth: '95vw',
maxHeight: '95vh',
});
}
toggleVideoZoom() {

View File

@@ -3,7 +3,10 @@
"extends": "./tsconfig.json",
"compilerOptions": {
"outDir": "./out-tsc/app",
"types": []
"types": [
"dom-mediacapture-transform",
"dom-webcodecs"
]
},
"files": [
"src/main.ts",

View File

@@ -20,6 +20,7 @@
"module": "ES2022",
"useDefineForClassFields": false,
"strictPropertyInitialization": false,
"skipLibCheck": true,
"lib": [
"ES2022",
"dom"