implemented drag & drop for image processing
@@ -1,3 +1,3 @@
 node_modules
-private
 pnpm-lock.yaml
+samples
@@ -1,3 +1,6 @@
 node_modules
-private
 pnpm-lock.yaml
+samples
+typedoc
+test
+wiki
README.md (40 changes)
@@ -23,7 +23,13 @@ JavaScript module using TensorFlow/JS Machine Learning library
 Compatible with both software *tfjs-node* and
 GPU accelerated backends *tfjs-node-gpu* using CUDA libraries

-Check out [**Live Demo**](https://vladmandic.github.io/human/demo/index.html) for processing of live WebCam video or static images
+<br>
+
+Check out [**Live Demo**](https://vladmandic.github.io/human/demo/index.html) app for processing of live WebCam video or static images
+
+- To start video detection, simply press *Play*
+- To process images, simply drag & drop in your Browser window
+- Note: For optimal performance, select only models you'd like to use

 <br>

@@ -89,20 +95,30 @@ All options as presented in the demo application...

 <br>

-**Validation image:**
-> [demo/index.html](demo/index.html?image=%22../assets/human-sample-upper.jpg%22)
-
-[image]
-
-**Using static images:**
-> [demo/index.html](demo/index.html?images=true)
-
-[image]
-
-**Live WebCam view:**
-> [demo/index.html](demo/index.html)
-
-[image]
+**Face Close-up:**
+[image]
+
+<br>
+
+**Face under a high angle:**
+[image]
+
+<br>
+
+**Full Person Details:**
+[image]
+
+<br>
+
+**Pose Detection:**
+[image]
+
+<br>
+
+**Large Group:**
+[image]
+
+<br>

 **Face Similarity Matching:**
 Extracts all faces from provided input images,
@@ -112,11 +128,15 @@ and optionally matches detected face with database of known people to guess their

 [image]

+<br>
+
 **Face3D OpenGL Rendering:**
 > [demo/face3d.html](demo/face3d.html)

 [image]

+<br>
+
 **468-Point Face Mesh Details:**
 (view in full resolution to see keypoints)

(binary changes: screenshot image assets added, removed, or replaced)
demo/index.js (104 changes)
@@ -77,6 +77,7 @@ const ui = {
   modelsPreload: true, // preload human models on startup
   modelsWarmup: true, // warmup human models on startup
   buffered: true, // should output be buffered between frames
+  interpolated: true, // should output be interpolated for smoothness between frames
   iconSize: '48px', // ui icon sizes

   // internal variables
@@ -228,8 +229,12 @@ async function drawResults(input) {
   }

   // draw all results using interpolated results
-  const interpolated = human.next(result);
-  human.draw.all(canvas, interpolated, drawOptions);
+  if (ui.interpolated) {
+    const interpolated = human.next(result);
+    human.draw.all(canvas, interpolated, drawOptions);
+  } else {
+    human.draw.all(canvas, result, drawOptions);
+  }
   /* alternatively use individual functions
   human.draw.face(canvas, result.face);
   human.draw.body(canvas, result.body);
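Note: `human.next()` returns a result interpolated from recent detections, which is what keeps buffered video output smooth; the new `ui.interpolated` flag turns that off for still images, where there is no following frame to interpolate toward. A minimal sketch of the underlying two-loop pattern, assuming `human` is an initialized library instance (loop names and the bare `draw.all` call are hypothetical simplifications):

```js
let lastResult; // refreshed by the detection loop, consumed by the draw loop

// detection loop: runs as fast as the models allow
async function detectLoop(human, video) {
  lastResult = await human.detect(video);
  requestAnimationFrame(() => detectLoop(human, video));
}

// draw loop: runs every display frame, smoothing between detections
async function drawLoop(human, canvas) {
  if (lastResult) await human.draw.all(canvas, human.next(lastResult));
  requestAnimationFrame(() => drawLoop(human, canvas));
}
```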
@@ -246,20 +251,21 @@ async function drawResults(input) {
   const gpu = engine.backendInstance ? `gpu: ${(engine.backendInstance.numBytesInGPU ? engine.backendInstance.numBytesInGPU : 0).toLocaleString()} bytes` : '';
   const memory = `system: ${engine.state.numBytes.toLocaleString()} bytes ${gpu} | tensors: ${engine.state.numTensors.toLocaleString()}`;
   const processing = result.canvas ? `processing: ${result.canvas.width} x ${result.canvas.height}` : '';
-  const avgDetect = Math.trunc(10 * ui.detectFPS.reduce((a, b) => a + b, 0) / ui.detectFPS.length) / 10;
-  const avgDraw = Math.trunc(10 * ui.drawFPS.reduce((a, b) => a + b, 0) / ui.drawFPS.length) / 10;
+  const avgDetect = ui.detectFPS.length > 0 ? Math.trunc(10 * ui.detectFPS.reduce((a, b) => a + b, 0) / ui.detectFPS.length) / 10 : 0;
+  const avgDraw = ui.drawFPS.length > 0 ? Math.trunc(10 * ui.drawFPS.reduce((a, b) => a + b, 0) / ui.drawFPS.length) / 10 : 0;
   const warning = (ui.detectFPS.length > 5) && (avgDetect < 5) ? '<font color="lightcoral">warning: your performance is low: try switching to higher performance backend, lowering resolution or disabling some models</font>' : '';
+  const fps = avgDetect > 0 ? `FPS process:${avgDetect} refresh:${avgDraw}` : '';
   document.getElementById('log').innerHTML = `
     video: ${ui.camera.name} | facing: ${ui.camera.facing} | screen: ${window.innerWidth} x ${window.innerHeight} camera: ${ui.camera.width} x ${ui.camera.height} ${processing}<br>
     backend: ${human.tf.getBackend()} | ${memory}<br>
-    performance: ${str(lastDetectedResult.performance)}ms FPS process:${avgDetect} refresh:${avgDraw}<br>
+    performance: ${str(lastDetectedResult.performance)}ms ${fps}<br>
     ${warning}<br>
   `;
   ui.framesDraw++;
   ui.lastFrame = performance.now();
   // if buffered, immediate loop but limit frame rate although it's going to run slower as JS is singlethreaded
   if (ui.buffered) {
-    ui.drawThread = requestAnimationFrame(() => drawResults(input, canvas));
+    ui.drawThread = requestAnimationFrame(() => drawResults(input));
   } else {
     log('stopping buffered refresh');
     if (ui.drawThread) cancelAnimationFrame(ui.drawThread);
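Note: the guarded averages above fix a divide-by-zero: before any frame has been timed, `ui.detectFPS` is empty and `reduce(...) / 0` yields `NaN`, which then leaked into the log line. The pattern in isolation (helper name hypothetical):

```js
// average an array of per-frame FPS samples, rounded to one decimal;
// an empty sample set reports 0 instead of NaN (0 / 0)
const avgFPS = (samples) => (samples.length > 0
  ? Math.trunc(10 * samples.reduce((a, b) => a + b, 0) / samples.length) / 10
  : 0);

avgFPS([]);           // 0
avgFPS([28.4, 31.2]); // 29.8
```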
@@ -431,7 +437,7 @@ function runHumanDetect(input, canvas, timestamp) {
     ctx.drawImage(input, 0, 0, canvas.width, canvas.height);
     const data = ctx.getImageData(0, 0, canvas.width, canvas.height);
     // perform detection in worker
-    webWorker(input, data, canvas, userConfig, timestamp);
+    webWorker(input, data, canvas, timestamp);
     status();
   } else {
     human.detect(input, userConfig).then((result) => {
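Note: the worker call drops `userConfig` from its argument list, but the frame still crosses as `ImageData`. Generically, a frame payload can be posted to a web worker as a transferable so the pixel buffer moves without being cloned; a sketch of that pattern (worker script name and message shape are hypothetical, not this demo's protocol):

```js
// ship a captured frame to a web worker, transferring the pixel buffer instead of copying it
const worker = new Worker('detect-worker.js'); // hypothetical worker script

function sendFrame(imageData, timestamp) {
  const payload = { pixels: imageData.data.buffer, width: imageData.width, height: imageData.height, timestamp };
  worker.postMessage(payload, [imageData.data.buffer]); // second arg marks the buffer as transferable
}
```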
@@ -457,32 +463,66 @@ function runHumanDetect(input, canvas, timestamp) {
 }

 // main processing function when input is image, can use direct invocation or web worker
-async function processImage(input) {
+async function processImage(input, title) {
   return new Promise((resolve) => {
     const image = new Image();
+    image.onerror = async () => status('image loading error');
     image.onload = async () => {
-      log('processing image:', encodeURI(image.src));
+      ui.interpolated = false; // stop interpolating results if input is image
+      status(`processing image: ${title}`);
       const canvas = document.getElementById('canvas');
       image.width = image.naturalWidth;
       image.height = image.naturalHeight;
       canvas.width = human.config.filter.width && human.config.filter.width > 0 ? human.config.filter.width : image.naturalWidth;
       canvas.height = human.config.filter.height && human.config.filter.height > 0 ? human.config.filter.height : image.naturalHeight;
+      const origCacheSensitivity = userConfig.cacheSensitivity;
+      userConfig.cacheSensitivity = 0;
       const result = await human.detect(image, userConfig);
+      userConfig.cacheSensitivity = origCacheSensitivity;
       lastDetectedResult = result;
       await drawResults(image);
       const thumb = document.createElement('canvas');
       thumb.className = 'thumbnail';
-      thumb.width = window.innerWidth / (ui.columns + 0.1);
+      thumb.width = ui.columns > 1 ? window.innerWidth / (ui.columns + 0.1) : window.innerWidth - 14;
       thumb.height = thumb.width * canvas.height / canvas.width;
       if (result.face && result.face.length > 0) {
         thumb.title = result.face.map((a, i) => `#${i} face: ${Math.trunc(100 * a.faceScore)}% box: ${Math.trunc(100 * a.boxScore)}% age: ${Math.trunc(a.age)} gender: ${Math.trunc(100 * a.genderScore)}% ${a.gender}`).join(' | ');
       } else {
         thumb.title = 'no face detected';
       }
+      thumb.addEventListener('click', (evt) => {
+        const stdWidth = ui.columns > 1 ? window.innerWidth / (ui.columns + 0.1) : window.innerWidth - 14;
+        // zoom in/out on click
+        if (evt.target.style.width === `${stdWidth}px`) {
+          evt.target.style.width = '';
+          evt.target.style.height = `${document.getElementById('log').offsetTop - document.getElementById('media').offsetTop}px`;
+        } else {
+          evt.target.style.width = `${stdWidth}px`;
+          evt.target.style.height = '';
+        }
+        // copy to clipboard on click
+        if (typeof ClipboardItem !== 'undefined' && navigator.clipboard) {
+          evt.target.toBlob((blob) => {
+            // eslint-disable-next-line no-undef
+            const item = new ClipboardItem({ 'image/png': blob });
+            navigator.clipboard.write([item]);
+            log('copied image to clipboard');
+          });
+        }
+      });
       const ctx = thumb.getContext('2d');
       ctx.drawImage(canvas, 0, 0, canvas.width, canvas.height, 0, 0, thumb.width, thumb.height);
-      document.getElementById('samples-container').appendChild(thumb);
-      image.src = '';
+      const prev = document.getElementsByClassName('thumbnail');
+      if (prev && prev.length > 0) document.getElementById('samples-container').insertBefore(thumb, prev[0]);
+      else document.getElementById('samples-container').appendChild(thumb);
+
+      // finish up
+      status();
+      document.getElementById('play').style.display = 'none';
+      document.getElementById('loader').style.display = 'none';
+      if (ui.detectThread) cancelAnimationFrame(ui.detectThread);
+      if (ui.drawThread) cancelAnimationFrame(ui.drawThread);
+
       resolve(true);
     };
     image.src = input;
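Note: `cacheSensitivity` controls the library's frame-similarity caching, which skips re-running models when consecutive inputs look alike; that is wrong for a batch of unrelated still images, hence the temporary override around `human.detect()` above. The same idea as a reusable helper (a sketch; `detectStillImage` is hypothetical, and the try/finally hardening goes beyond what the demo does):

```js
// run a one-off detection on a still image with frame caching disabled,
// restoring the caller's cacheSensitivity even if detection throws
async function detectStillImage(human, image, config) {
  const saved = config.cacheSensitivity;
  config.cacheSensitivity = 0; // 0 = treat every input as a brand-new frame
  try {
    return await human.detect(image, config);
  } finally {
    config.cacheSensitivity = saved;
  }
}
```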
@@ -522,11 +562,7 @@ async function detectSampleImages() {
   status('processing images');
   document.getElementById('samples-container').innerHTML = '';
   for (const m of Object.values(menu)) m.hide();
-  for (const image of ui.samples) await processImage(image);
-  status();
-  document.getElementById('play').style.display = 'none';
-  document.getElementById('loader').style.display = 'none';
-  if (ui.detectThread) cancelAnimationFrame(ui.detectThread);
+  for (const image of ui.samples) await processImage(image, image);
 }

 function setupMenu() {
@@ -604,8 +640,8 @@ function setupMenu() {
     human.config.hand.rotation = val;
   });
   menu.process.addHTML('<hr style="border-style: inset; border-color: dimgray">');
-  menu.process.addButton('process sample images', 'process images', () => detectSampleImages());
-  menu.process.addHTML('<hr style="border-style: inset; border-color: dimgray">');
+  // menu.process.addButton('process sample images', 'process images', () => detectSampleImages());
+  // menu.process.addHTML('<hr style="border-style: inset; border-color: dimgray">');
   menu.process.addChart('FPS', 'FPS');

   menu.models = new Menu(document.body, '', { top, left: x[3] });
@@ -676,6 +712,31 @@ async function drawWarmup(res) {
   await human.draw.all(canvas, res, drawOptions);
 }

+async function processDataURL(f) {
+  return new Promise((resolve) => {
+    const reader = new FileReader();
+    reader.onload = async (e) => {
+      const dataURL = e.target.result;
+      await processImage(dataURL, f.name);
+      document.getElementById('canvas').style.display = 'none';
+      resolve(true);
+    };
+    reader.readAsDataURL(f);
+  });
+}
+
+async function dragAndDrop() {
+  document.body.addEventListener('dragenter', (evt) => evt.preventDefault());
+  document.body.addEventListener('dragleave', (evt) => evt.preventDefault());
+  document.body.addEventListener('dragover', (evt) => evt.preventDefault());
+  document.body.addEventListener('drop', async (evt) => {
+    evt.preventDefault();
+    evt.dataTransfer.dropEffect = 'copy';
+    if (evt.dataTransfer.files.length < 2) ui.columns = 1;
+    for (const f of evt.dataTransfer.files) await processDataURL(f);
+  });
+}
+
 async function pwaRegister() {
   if (!pwa.enabled) return;
   if ('serviceWorker' in navigator) {
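Note: `processDataURL` base64-encodes each dropped file via `FileReader.readAsDataURL`, which inflates the in-memory string by roughly a third for large images. An object URL references the `File` directly; a possible alternative sketch (not what this commit does, `processObjectURL` is hypothetical):

```js
// alternative: point the <img> at the dropped File via an object URL instead of a data URL
async function processObjectURL(f) {
  const url = URL.createObjectURL(f); // cheap reference, no base64 round-trip
  await processImage(url, f.name);    // processImage() accepts anything assignable to image.src
  URL.revokeObjectURL(url);           // release the reference once processing finishes
}
```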
@@ -790,11 +851,16 @@ async function main() {
   document.getElementById('play').style.display = 'block';
   for (const m of Object.values(menu)) m.hide();

+  // init drag & drop
+  await dragAndDrop();
+
   if (params.has('image')) {
     try {
       const image = JSON.parse(params.get('image'));
       log('overriding image:', image);
       ui.samples = [image];
+      ui.columns = 1;
     } catch {
       status('cannot parse input image');
       log('cannot parse input image', params.get('image'));
(generated bundle)
@@ -155,8 +155,8 @@ var config = {
     enabled: true,
     detector: {
       modelPath: "blazeface.json",
-      rotation: false,
-      maxDetected: 10,
+      rotation: true,
+      maxDetected: 5,
       skipFrames: 15,
       minConfidence: 0.2,
       iouThreshold: 0.1,

@@ -9806,7 +9806,7 @@ __export(draw_exports, {
   person: () => person
 });
 var options = {
-  color: "rgba(173, 216, 230, 0.3)",
+  color: "rgba(173, 216, 230, 0.6)",
   labelColor: "rgba(173, 216, 230, 1)",
   shadowColor: "black",
   font: 'small-caps 14px "Segoe UI"',
(the same two hunks repeat verbatim in the two other generated bundles, at @@ -156,8 +156,8 @@ / @@ -9807,7 +9807,7 @@ and @@ -155,8 +155,8 @@ / @@ -9806,7 +9806,7 @@)
@@ -241,10 +241,10 @@ const config: Config = {
     // (note: module is not loaded until it is required)
     detector: {
       modelPath: 'blazeface.json', // detector model, can be absolute path or relative to modelBasePath
-      rotation: false, // use best-guess rotated face image or just box with rotation as-is
+      rotation: true, // use best-guess rotated face image or just box with rotation as-is
       // false means higher performance, but incorrect mesh mapping if face angle is above 20 degrees
       // this parameter is not valid in nodejs
-      maxDetected: 10, // maximum number of faces detected in the input
+      maxDetected: 5, // maximum number of faces detected in the input
       // should be set to the minimum number for performance
       skipFrames: 15, // how many max frames to go without re-running the face bounding box detector
       // only used when cacheSensitivity is not zero
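Note: the new defaults trade speed for accuracy (`rotation: true` corrects mesh mapping for faces tilted beyond roughly 20 degrees) and cap load (`maxDetected: 5`). Since `human.detect()` merges a partial user config over the defaults, a caller preferring the old behavior can override just these keys; a sketch (`inputElement` is any supported image/video/canvas source):

```js
// restore the previous performance-oriented detector defaults for one caller;
// the partial config is merged over the library defaults
const userConfig = {
  face: {
    detector: {
      rotation: false, // skip rotation correction: faster, mesh may misalign on tilted faces
      maxDetected: 10, // allow detecting larger groups again
    },
  },
};
const result = await human.detect(inputElement, userConfig);
```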
@@ -47,7 +47,7 @@ export interface DrawOptions {
 }

 export const options: DrawOptions = {
-  color: <string>'rgba(173, 216, 230, 0.3)', // 'lightblue' with light alpha channel
+  color: <string>'rgba(173, 216, 230, 0.6)', // 'lightblue' with light alpha channel
   labelColor: <string>'rgba(173, 216, 230, 1)', // 'lightblue' with dark alpha channel
   shadowColor: <string>'black',
   font: <string>'small-caps 14px "Segoe UI"',
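Note: raising the default fill alpha from 0.3 to 0.6 doubles the opacity of drawn overlays. `human.draw.all()` accepts a partial options object (as the demo's `drawOptions` usage shows), so the lighter look stays one override away; a sketch:

```js
// per-call override back to the lighter 0.3-alpha fill
const drawOptions = { color: 'rgba(173, 216, 230, 0.3)' };
await human.draw.all(canvas, result, drawOptions);
```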