mirror of https://github.com/vladmandic/human
fix camera restart on resize
parent 73074cd78a
commit 26c32bc8af
README.md
@@ -28,10 +28,9 @@
 - [**Notes on Backends**](https://github.com/vladmandic/human/wiki/Backends)
 - [**Development Server**](https://github.com/vladmandic/human/wiki/Development-Server)
 - [**Build Process**](https://github.com/vladmandic/human/wiki/Build-Process)
-- [**List of Models**](https://github.com/vladmandic/human/wiki/Models)
 - [**Performance Notes**](https://github.com/vladmandic/human/wiki/Performance)
 - [**Performance Profiling**](https://github.com/vladmandic/human/wiki/Profiling)
-- [**Credits**](https://github.com/vladmandic/human/wiki/Credits)
+- [**List of Models && Credits**](https://github.com/vladmandic/human/wiki/Models)

 <br>
Binary file not shown. Before: 126 KiB | After: 42 KiB
demo/browser.js
@@ -39,6 +39,7 @@ const ui = {
   buffered: false,
   bufferedFPSTarget: 24,
   drawThread: null,
+  detectThread: null,
   framesDraw: 0,
   framesDetect: 0,
   bench: false,
@@ -155,6 +156,7 @@ async function setupCamera() {
   const canvas = document.getElementById('canvas');
   const output = document.getElementById('log');
   const live = video.srcObject ? ((video.srcObject.getVideoTracks()[0].readyState === 'live') && (video.readyState > 2) && (!video.paused)) : false;
+  console.log('camera live', live);
   let msg = '';
   status('setting up camera');
   // setup webcam. note that navigator.mediaDevices requires that page is accessed via https
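The `live` expression above packs the whole restart decision into one line. Restated as a standalone helper for readability (a sketch, not code from this commit; `isCameraLive` is a hypothetical name):

// camera counts as live only if the MediaStream track is still active, the
// element has buffered enough to play (readyState > 2, i.e. at least
// HAVE_FUTURE_DATA), and playback is not paused
function isCameraLive(video) {
  if (!video.srcObject) return false;
  const track = video.srcObject.getVideoTracks()[0];
  return (track.readyState === 'live') && (video.readyState > 2) && (!video.paused);
}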
@@ -206,7 +208,10 @@ async function setupCamera() {
   // silly font resizing for paint-on-canvas since viewport can be zoomed
   const size = 14 + (6 * canvas.width / window.innerWidth);
   ui.baseFont = ui.baseFontProto.replace(/{size}/, `${size}px`);
+  console.log('camera continue', live);
   if (live) video.play();
+  // eslint-disable-next-line no-use-before-define
+  if (live && !ui.detectThread) runHumanDetect(video, canvas);
   ui.busy = false;
   // do once more because onresize events can be delayed or skipped
   // if (video.width > window.innerWidth) await setupCamera();
@@ -230,7 +235,7 @@ function webWorker(input, image, canvas, timestamp) {
     ui.framesDetect++;
     if (!ui.drawThread) drawResults(input);
     // eslint-disable-next-line no-use-before-define
-    requestAnimationFrame((now) => runHumanDetect(input, canvas, now));
+    ui.detectThread = requestAnimationFrame((now) => runHumanDetect(input, canvas, now));
   });
 }
 // pass image data as arraybuffer to worker by reference to avoid copy
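The trailing comment refers to transferable objects: listing the pixel buffer in postMessage's transfer list moves the ArrayBuffer to the worker instead of structured-cloning it. A minimal sketch of that pattern (element id and worker script name are placeholders, not from this commit):

const canvas = document.getElementById('canvas'); // placeholder id
const ctx = canvas.getContext('2d');
const worker = new Worker('worker.js'); // placeholder script name
const pixels = ctx.getImageData(0, 0, canvas.width, canvas.height);
// the second argument transfers ownership of the buffer: zero-copy, but the
// sender can no longer touch pixels.data afterwards
worker.postMessage({ image: pixels.data.buffer, width: canvas.width, height: canvas.height }, [pixels.data.buffer]);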
@@ -245,7 +250,9 @@ function runHumanDetect(input, canvas, timestamp) {
   if (!live && input.srcObject) {
     // stop ui refresh
     if (ui.drawThread) clearTimeout(ui.drawThread);
+    if (ui.detectThread) cancelAnimationFrame(ui.detectThread);
     ui.drawThread = null;
+    ui.detectThread = null;
     // if we want to continue and camera not ready, retry in 0.5sec, else just give up
     if (input.paused) log('camera paused');
     else if ((input.srcObject.getVideoTracks()[0].readyState === 'live') && (input.readyState <= 2)) setTimeout(() => runHumanDetect(input, canvas), 500);
@@ -274,7 +281,7 @@ function runHumanDetect(input, canvas, timestamp) {
       lastDetectedResult = result;
       if (!ui.drawThread) drawResults(input);
       ui.framesDetect++;
-      requestAnimationFrame((now) => runHumanDetect(input, canvas, now));
+      ui.detectThread = requestAnimationFrame((now) => runHumanDetect(input, canvas, now));
     }
   });
 }
@@ -326,7 +333,7 @@ async function detectVideo() {
     status('');
     video.play();
   }
-  runHumanDetect(video, canvas);
+  if (!ui.detectThread) runHumanDetect(video, canvas);
 }

 // just initialize everything and call main function
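Taken together, the demo changes implement a single-owner detection loop, which is the actual resize fix: the requestAnimationFrame handle always lands in ui.detectThread, every start path checks that handle first, and the camera-stop path cancels it. Condensed into a standalone sketch (hypothetical names, not the literal demo code):

const ui = { detectThread: null };

function startDetect(step) {
  if (ui.detectThread) return; // a loop already runs; a resize/restart must not spawn a second one
  const tick = (now) => {
    step(now);
    ui.detectThread = requestAnimationFrame(tick); // always store the newest handle
  };
  ui.detectThread = requestAnimationFrame(tick);
}

function stopDetect() {
  if (ui.detectThread) cancelAnimationFrame(ui.detectThread);
  ui.detectThread = null; // doubles as the "not running" flag for startDetect()
}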
@@ -0,0 +1,63 @@ (new file)
#!/usr/bin/env -S node --no-deprecation --trace-warnings

const fs = require('fs');
const path = require('path');
const log = require('@vladmandic/pilogger');
const tf = require('@tensorflow/tfjs-node');

async function analyzeGraph(modelPath) {
  if (!fs.existsSync(modelPath)) log.warn('path does not exist:', modelPath);
  const stat = fs.statSync(modelPath);
  let model;
  if (stat.isFile()) model = await tf.loadGraphModel(`file://${modelPath}`);
  else model = await tf.loadGraphModel(`file://${path.join(modelPath, 'model.json')}`);
  log.info('graph model:', modelPath, tf.memory());
  // console.log(model.executor.graph.signature.inputs);
  // console.log(model.executor.graph.inputs);
  if (model.executor.graph.signature.inputs) {
    const inputs = Object.values(model.executor.graph.signature.inputs)[0];
    log.data('inputs:', { name: inputs.name, dtype: inputs.dtype, shape: inputs.tensorShape.dim });
  } else {
    const inputs = model.executor.graph.inputs[0];
    log.data('inputs:', { name: inputs.name, dtype: inputs.attrParams.dtype.value, shape: inputs.attrParams.shape.value });
  }
  const outputs = [];
  let i = 0;
  if (model.executor.graph.signature.outputs) {
    for (const [key, val] of Object.entries(model.executor.graph.signature.outputs)) {
      outputs.push({ id: i++, name: key, dtype: val.dtype, shape: val.tensorShape?.dim });
    }
  } else {
    for (const out of model.executor.graph.outputs) {
      outputs.push({ id: i++, name: out.name });
    }
  }
  log.data('outputs:', outputs);
}

async function analyzeSaved(modelPath) {
  const meta = await tf.node.getMetaGraphsFromSavedModel(modelPath);
  log.info('saved model:', modelPath);
  const sign = Object.values(meta[0].signatureDefs)[0];
  log.data('tags:', meta[0].tags);
  log.data('signature:', Object.keys(meta[0].signatureDefs));
  const inputs = Object.values(sign.inputs)[0];
  log.data('inputs:', { name: inputs.name, dtype: inputs.dtype, dimensions: inputs.shape.length });
  const outputs = [];
  let i = 0;
  for (const [key, val] of Object.entries(sign.outputs)) {
    outputs.push({ id: i++, name: key, dtype: val.dtype, dimensions: val.shape.length });
  }
  log.data('outputs:', outputs);
}

async function main() {
  log.header();
  if (process.argv.length !== 3) log.error('path required');
  else if (!fs.existsSync(process.argv[2])) log.error(`path does not exist: ${process.argv[2]}`);
  else if (fs.existsSync(path.join(process.argv[2], '/saved_model.pb'))) analyzeSaved(process.argv[2]);
  else if (fs.existsSync(path.join(process.argv[2], '/model.json')) || process.argv[2].endsWith('.json')) analyzeGraph(process.argv[2]);
  else log.error('path does not contain valid model');
}

main();
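The new script takes exactly one argument and dispatches on what it finds there, so usage follows directly from main() (the script name below is a placeholder, since this diff does not show the new file's path):

// node analyze.js dir-with-saved_model.pb   -> analyzeSaved()
// node analyze.js dir-with-model.json       -> analyzeGraph()
// node analyze.js path/to/model.json        -> analyzeGraph()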
dist/demo-browser-index.js
@@ -69764,7 +69764,7 @@ var require_facepipeline = __commonJS((exports) => {
       this.skipped++;
       let useFreshBox = false;
       let detector;
-      if (this.skipped > config2.detector.skipFrames || !config2.mesh.enabled) {
+      if (this.skipped > config2.detector.skipFrames || !config2.mesh.enabled || !config2.videoOptimized) {
         detector = await this.boundingBoxDetector.getBoundingBoxes(input);
         if (input.shape[1] !== 255 && input.shape[2] !== 255)
           this.skipped = 0;
@@ -69985,7 +69985,7 @@ var require_age = __commonJS((exports) => {
   async function predict2(image2, config2) {
     if (!models.age)
       return null;
-    if (frame < config2.face.age.skipFrames && last.age && last.age > 0) {
+    if (frame < config2.face.age.skipFrames && config2.videoOptimized && last.age && last.age > 0) {
       frame += 1;
       return last;
     }
@@ -70036,7 +70036,7 @@ var require_gender = __commonJS((exports) => {
   async function predict2(image2, config2) {
     if (!models.gender)
       return null;
-    if (frame < config2.face.gender.skipFrames && last.gender !== "") {
+    if (frame < config2.face.gender.skipFrames && config2.videoOptimized && last.gender !== "") {
       frame += 1;
       return last;
     }
@@ -70111,7 +70111,7 @@ var require_emotion = __commonJS((exports) => {
   async function predict2(image2, config2) {
     if (!models.emotion)
       return null;
-    if (frame < config2.face.emotion.skipFrames && last.length > 0) {
+    if (frame < config2.face.emotion.skipFrames && config2.videoOptimized && last.length > 0) {
       frame += 1;
       return last;
     }
@@ -70882,7 +70882,7 @@ var require_handpipeline = __commonJS((exports) => {
       this.skipped++;
       let useFreshBox = false;
       let boxes;
-      if (this.skipped > config2.skipFrames || !config2.landmarks) {
+      if (this.skipped > config2.skipFrames || !config2.landmarks || !config2.videoOptimized) {
         boxes = await this.boxDetector.estimateHandBounds(image2, config2);
         if (image2.shape[1] !== 255 && image2.shape[2] !== 255)
           this.skipped = 0;
@@ -98511,11 +98511,7 @@ var config_default = {
     }
   }
 };
-var version3 = "0.9.0";
-const disableSkipFrames = {
-  face: {detector: {skipFrames: 0}, age: {skipFrames: 0}, gender: {skipFrames: 0}, emotion: {skipFrames: 0}},
-  hand: {skipFrames: 0}
-};
+var version3 = "0.9.1";
 const now2 = () => {
   if (typeof performance !== "undefined")
     return performance.now();
@@ -98782,8 +98778,6 @@ class Human {
     this.state = "config";
     let timeStamp;
     this.config = mergeDeep(this.config, userConfig2);
-    if (!this.config.videoOptimized)
-      this.config = mergeDeep(this.config, disableSkipFrames);
     this.state = "check";
     const error = this.sanity(input);
     if (error) {
@@ -99673,6 +99667,7 @@ const ui = {
   buffered: false,
   bufferedFPSTarget: 24,
   drawThread: null,
+  detectThread: null,
   framesDraw: 0,
   framesDetect: 0,
   bench: false
@@ -99770,6 +99765,7 @@ async function setupCamera() {
   const canvas = document.getElementById("canvas");
   const output = document.getElementById("log");
   const live = video.srcObject ? video.srcObject.getVideoTracks()[0].readyState === "live" && video.readyState > 2 && !video.paused : false;
+  console.log("camera live", live);
   let msg = "";
   status("setting up camera");
   if (!navigator.mediaDevices) {
@@ -99823,8 +99819,11 @@ ${msg}`;
   ui.menuHeight.input.setAttribute("value", video.height);
   const size = 14 + 6 * canvas.width / window.innerWidth;
   ui.baseFont = ui.baseFontProto.replace(/{size}/, `${size}px`);
+  console.log("camera continue", live);
   if (live)
     video.play();
+  if (live && !ui.detectThread)
+    runHumanDetect(video, canvas);
   ui.busy = false;
   status("");
   resolve(video);
@@ -99844,7 +99843,7 @@ function webWorker(input, image2, canvas, timestamp) {
     ui.framesDetect++;
     if (!ui.drawThread)
       drawResults(input);
-    requestAnimationFrame((now3) => runHumanDetect(input, canvas, now3));
+    ui.detectThread = requestAnimationFrame((now3) => runHumanDetect(input, canvas, now3));
   });
 }
 if (ui.bench)
@@ -99857,7 +99856,10 @@ function runHumanDetect(input, canvas, timestamp) {
   if (!live && input.srcObject) {
     if (ui.drawThread)
       clearTimeout(ui.drawThread);
+    if (ui.detectThread)
+      cancelAnimationFrame(ui.detectThread);
     ui.drawThread = null;
+    ui.detectThread = null;
     if (input.paused)
       log2("camera paused");
     else if (input.srcObject.getVideoTracks()[0].readyState === "live" && input.readyState <= 2)
@@ -99891,7 +99893,7 @@ function runHumanDetect(input, canvas, timestamp) {
     if (!ui.drawThread)
       drawResults(input);
     ui.framesDetect++;
-    requestAnimationFrame((now3) => runHumanDetect(input, canvas, now3));
+    ui.detectThread = requestAnimationFrame((now3) => runHumanDetect(input, canvas, now3));
   }
   });
 }
@@ -99939,7 +99941,8 @@ async function detectVideo() {
     status("");
     video.play();
   }
-  runHumanDetect(video, canvas);
+  if (!ui.detectThread)
+    runHumanDetect(video, canvas);
 }
 async function detectSampleImages() {
   document.getElementById("play").style.display = "none";
File diff suppressed because one or more lines are too long
@@ -5,7 +5,7 @@
     "imports": []
   },
   "demo/browser.js": {
-    "bytes": 21317,
+    "bytes": 21691,
     "imports": [
       {
         "path": "dist/human.esm.js"
@@ -30,7 +30,7 @@
     "imports": []
   },
   "dist/human.esm.js": {
-    "bytes": 3456039,
+    "bytes": 3455902,
     "imports": []
   }
 },
@@ -38,13 +38,13 @@
   "dist/demo-browser-index.js.map": {
     "imports": [],
     "inputs": {},
-    "bytes": 5448434
+    "bytes": 5448028
   },
   "dist/demo-browser-index.js": {
     "imports": [],
     "inputs": {
       "dist/human.esm.js": {
-        "bytesInOutput": 3445365
+        "bytesInOutput": 3445228
       },
       "demo/draw.js": {
         "bytesInOutput": 9599
@@ -56,10 +56,10 @@
       "bytesInOutput": 9770
     },
     "demo/browser.js": {
-      "bytesInOutput": 18714
+      "bytesInOutput": 19051
     }
   },
-  "bytes": 3497406
+  "bytes": 3497606
   }
  }
 }
dist/human.esm.js
@@ -69791,7 +69791,7 @@ var require_facepipeline = __commonJS((exports) => {
       this.skipped++;
       let useFreshBox = false;
       let detector;
-      if (this.skipped > config2.detector.skipFrames || !config2.mesh.enabled) {
+      if (this.skipped > config2.detector.skipFrames || !config2.mesh.enabled || !config2.videoOptimized) {
         detector = await this.boundingBoxDetector.getBoundingBoxes(input);
         if (input.shape[1] !== 255 && input.shape[2] !== 255)
           this.skipped = 0;
@@ -70018,7 +70018,7 @@ var require_age = __commonJS((exports) => {
   async function predict2(image2, config2) {
     if (!models.age)
       return null;
-    if (frame < config2.face.age.skipFrames && last.age && last.age > 0) {
+    if (frame < config2.face.age.skipFrames && config2.videoOptimized && last.age && last.age > 0) {
       frame += 1;
       return last;
     }
@@ -70071,7 +70071,7 @@ var require_gender = __commonJS((exports) => {
   async function predict2(image2, config2) {
     if (!models.gender)
       return null;
-    if (frame < config2.face.gender.skipFrames && last.gender !== "") {
+    if (frame < config2.face.gender.skipFrames && config2.videoOptimized && last.gender !== "") {
       frame += 1;
       return last;
     }
@@ -70148,7 +70148,7 @@ var require_emotion = __commonJS((exports) => {
   async function predict2(image2, config2) {
     if (!models.emotion)
       return null;
-    if (frame < config2.face.emotion.skipFrames && last.length > 0) {
+    if (frame < config2.face.emotion.skipFrames && config2.videoOptimized && last.length > 0) {
       frame += 1;
       return last;
     }
@@ -70947,7 +70947,7 @@ var require_handpipeline = __commonJS((exports) => {
       this.skipped++;
       let useFreshBox = false;
       let boxes;
-      if (this.skipped > config2.skipFrames || !config2.landmarks) {
+      if (this.skipped > config2.skipFrames || !config2.landmarks || !config2.videoOptimized) {
         boxes = await this.boxDetector.estimateHandBounds(image2, config2);
         if (image2.shape[1] !== 255 && image2.shape[2] !== 255)
           this.skipped = 0;
@@ -98884,13 +98884,9 @@ var config_default = {
 };

 // package.json
-var version3 = "0.9.0";
+var version3 = "0.9.1";

 // src/human.js
-const disableSkipFrames = {
-  face: {detector: {skipFrames: 0}, age: {skipFrames: 0}, gender: {skipFrames: 0}, emotion: {skipFrames: 0}},
-  hand: {skipFrames: 0}
-};
 const now2 = () => {
   if (typeof performance !== "undefined")
     return performance.now();
@@ -99157,8 +99153,6 @@ class Human {
     this.state = "config";
     let timeStamp;
     this.config = mergeDeep(this.config, userConfig);
-    if (!this.config.videoOptimized)
-      this.config = mergeDeep(this.config, disableSkipFrames);
     this.state = "check";
     const error = this.sanity(input);
     if (error) {
File diff suppressed because one or more lines are too long
@@ -12316,11 +12316,11 @@
     ]
   },
   "package.json": {
-    "bytes": 3616,
+    "bytes": 3629,
     "imports": []
   },
   "src/age/age.js": {
-    "bytes": 1941,
+    "bytes": 1966,
     "imports": [
       {
         "path": "src/tf.js"
@@ -12455,7 +12455,7 @@
     ]
   },
   "src/emotion/emotion.js": {
-    "bytes": 2979,
+    "bytes": 3004,
     "imports": [
       {
         "path": "src/tf.js"
@@ -12503,7 +12503,7 @@
     ]
   },
   "src/face/facepipeline.js": {
-    "bytes": 13762,
+    "bytes": 13788,
     "imports": [
       {
         "path": "src/tf.js"
@@ -12524,7 +12524,7 @@
     "imports": []
   },
   "src/gender/gender.js": {
-    "bytes": 3382,
+    "bytes": 3407,
     "imports": [
       {
         "path": "src/tf.js"
@@ -12562,7 +12562,7 @@
     ]
   },
   "src/hand/handpipeline.js": {
-    "bytes": 7532,
+    "bytes": 7558,
     "imports": [
       {
         "path": "src/tf.js"
@@ -12597,7 +12597,7 @@
     "imports": []
   },
   "src/human.js": {
-    "bytes": 16020,
+    "bytes": 15697,
     "imports": [
       {
         "path": "src/tf.js"
@@ -12695,7 +12695,7 @@
   "dist/human.esm.js.map": {
     "imports": [],
     "inputs": {},
-    "bytes": 5477319
+    "bytes": 5476328
   },
   "dist/human.esm.js": {
     "imports": [],
@@ -12740,7 +12740,7 @@
       "bytesInOutput": 51519
     },
     "src/face/facepipeline.js": {
-      "bytesInOutput": 12471
+      "bytesInOutput": 12498
     },
     "src/face/facemesh.js": {
       "bytesInOutput": 2461
@@ -12749,13 +12749,13 @@
       "bytesInOutput": 1118
     },
     "src/age/age.js": {
-      "bytesInOutput": 1563
+      "bytesInOutput": 1589
     },
     "src/gender/gender.js": {
-      "bytesInOutput": 2736
+      "bytesInOutput": 2762
     },
     "src/emotion/emotion.js": {
-      "bytesInOutput": 2438
+      "bytesInOutput": 2464
     },
     "src/embedding/embedding.js": {
       "bytesInOutput": 1769
@@ -12797,7 +12797,7 @@
       "bytesInOutput": 4402
     },
     "src/hand/handpipeline.js": {
-      "bytesInOutput": 7165
+      "bytesInOutput": 7192
     },
     "src/hand/anchors.js": {
       "bytesInOutput": 256590
@@ -13430,13 +13430,13 @@
       "bytesInOutput": 24
     },
     "src/human.js": {
-      "bytesInOutput": 13501
+      "bytesInOutput": 13232
     },
     "src/human.js": {
       "bytesInOutput": 0
     }
   },
-  "bytes": 3456039
+  "bytes": 3455902
   }
  }
 }
src/age/age.js
@@ -16,7 +16,7 @@ async function load(config) {

 async function predict(image, config) {
   if (!models.age) return null;
-  if ((frame < config.face.age.skipFrames) && last.age && (last.age > 0)) {
+  if ((frame < config.face.age.skipFrames) && config.videoOptimized && last.age && (last.age > 0)) {
     frame += 1;
     return last;
   }
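The same one-line gate is added to emotion and gender below. The cached-result pattern it controls looks roughly like this (a sketch; `runModel` and `predictWithCache` are stand-ins, not names from the source):

let frame = Number.MAX_SAFE_INTEGER; // forces a fresh inference on the first call
let last = {};

async function predictWithCache(image, config, runModel) {
  // serve the cached result only for video input (config.videoOptimized),
  // where consecutive frames are near-identical; still images always re-run
  if ((frame < config.face.age.skipFrames) && config.videoOptimized && last.age && (last.age > 0)) {
    frame += 1;
    return last;
  }
  frame = 0;
  last = await runModel(image);
  return last;
}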
src/emotion/emotion.js
@@ -21,7 +21,7 @@ async function load(config) {

 async function predict(image, config) {
   if (!models.emotion) return null;
-  if ((frame < config.face.emotion.skipFrames) && (last.length > 0)) {
+  if ((frame < config.face.emotion.skipFrames) && config.videoOptimized && (last.length > 0)) {
     frame += 1;
     return last;
   }
src/face/facepipeline.js
@@ -134,7 +134,7 @@ class Pipeline {
     let useFreshBox = false;
     // run new detector every skipFrames unless we only want box to start with
     let detector;
-    if ((this.skipped > config.detector.skipFrames) || !config.mesh.enabled) {
+    if ((this.skipped > config.detector.skipFrames) || !config.mesh.enabled || !config.videoOptimized) {
       detector = await this.boundingBoxDetector.getBoundingBoxes(input);
       // don't reset on test image
       if ((input.shape[1] !== 255) && (input.shape[2] !== 255)) this.skipped = 0;
src/gender/gender.js
@@ -21,7 +21,7 @@ async function load(config) {

 async function predict(image, config) {
   if (!models.gender) return null;
-  if ((frame < config.face.gender.skipFrames) && last.gender !== '') {
+  if ((frame < config.face.gender.skipFrames) && config.videoOptimized && last.gender !== '') {
     frame += 1;
     return last;
   }
src/hand/handpipeline.js
@@ -89,7 +89,7 @@ class HandPipeline {

   // run new detector every skipFrames unless we only want box to start with
   let boxes;
-  if ((this.skipped > config.skipFrames) || !config.landmarks) {
+  if ((this.skipped > config.skipFrames) || !config.landmarks || !config.videoOptimized) {
     boxes = await this.boxDetector.estimateHandBounds(image, config);
     // don't reset on test image
     if ((image.shape[1] !== 255) && (image.shape[2] !== 255)) this.skipped = 0;
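facepipeline.js (above) and handpipeline.js (here) gain the same third clause: with videoOptimized off, the box detector runs on every call rather than only every skipFrames calls. The shared cadence logic, sketched standalone (`maybeDetect` and `detectBoxes` are stand-ins for the per-pipeline detectors, not source names):

async function maybeDetect(state, config, input, detectBoxes) {
  state.skipped++;
  let boxes = null;
  // re-detect when the cached boxes are stale, when landmark tracking is off,
  // or unconditionally for non-video input
  if ((state.skipped > config.skipFrames) || !config.landmarks || !config.videoOptimized) {
    boxes = await detectBoxes(input, config);
    // the 255x255 warm-up/test image must not reset the cadence counter
    if ((input.shape[1] !== 255) && (input.shape[2] !== 255)) state.skipped = 0;
  }
  return boxes;
}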
src/human.js
@@ -12,11 +12,6 @@ import * as profile from './profile.js';
 import * as config from '../config.js';
 import * as app from '../package.json';

-// static config override for non-video detection
-const disableSkipFrames = {
-  face: { detector: { skipFrames: 0 }, age: { skipFrames: 0 }, gender: { skipFrames: 0 }, emotion: { skipFrames: 0 } }, hand: { skipFrames: 0 },
-};
-
 // helper function: gets elapsed time on both browser and nodejs
 const now = () => {
   if (typeof performance !== 'undefined') return performance.now();
@@ -327,7 +322,6 @@ class Human {

   // update configuration
   this.config = mergeDeep(this.config, userConfig);
-  if (!this.config.videoOptimized) this.config = mergeDeep(this.config, disableSkipFrames);

   // sanity checks
   this.state = 'check';
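Net effect of the two src/human.js hunks: the global disableSkipFrames merge is retired in favor of the per-model config.videoOptimized checks added above, so still-image calls no longer depend on rewriting the config. For reference, a generic deep merge of the shape the removed call relied on (the project's actual mergeDeep helper is defined elsewhere in src/human.js and is not shown in this diff; this is an assumed sketch, not its code):

function mergeDeep(target, source) {
  for (const key of Object.keys(source || {})) {
    const val = source[key];
    if (val && typeof val === 'object' && !Array.isArray(val)) {
      target[key] = mergeDeep(target[key] || {}, val); // recurse into nested config sections
    } else {
      target[key] = val; // primitives overwrite
    }
  }
  return target;
}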
wiki (submodule)
@@ -1 +1 @@
-Subproject commit e29ef0887f0be593ef827ee5020fe6a40f588f38
+Subproject commit 79cd119c0c5714324a0dae82477ce36e1d5f40a1