mirror of https://github.com/vladmandic/human

refactor distance
parent 1713990f66
commit 8fe34fd723
@@ -11,6 +11,7 @@
 ### **HEAD -> main** 2022/11/16 mandic00@live.com

+- added webcam id specification
 - include external typedefs
 - prepare external typedefs
 - rebuild all
TODO.md (2 changed lines)

@@ -70,7 +70,7 @@ Features:
 - Support selecting specific video source when multiple cameras are present
   See `human.webcam.enumerate()`
 - Updated algorithm to determine distance from camera based on iris size
-  See `human.result.face[n].iris`
+  See `human.result.face[n].distance`

 Architecture:
 - Reduce build dependencies
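For context, reading the new value from a detection result looks roughly like the sketch below. It is a minimal, illustrative example assuming the published `@vladmandic/human` package and an already-playing video element; the config value is illustrative. The demos later in this diff convert the value with `100 * distance`, which implies the field is reported in meters.

```ts
import { Human } from '@vladmandic/human';

const human = new Human({ modelBasePath: '../models' }); // model path is illustrative

async function logFaceDistance(video: HTMLVideoElement) {
  const result = await human.detect(video);
  for (const face of result.face) {
    // face.distance replaces the former face.iris field
    if (face.distance) console.log(`face distance: ~${Math.round(100 * face.distance)} cm`);
  }
}
```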
File diff suppressed because one or more lines are too long
@@ -42,6 +42,8 @@ const options = {
   blinkMin: 10, // minimum duration of a valid blink
   blinkMax: 800, // maximum duration of a valid blink
   threshold: 0.5, // minimum similarity
+  distanceMin: 0.4, // closest that face is allowed to be to the camera, in meters
+  distanceMax: 1.0, // farthest that face is allowed to be from the camera, in meters
   mask: humanConfig.face.detector.mask,
   rotation: humanConfig.face.detector.rotation,
   cropFactor: humanConfig.face.detector.cropFactor,
@@ -57,6 +59,7 @@ const ok: Record<string, { status: boolean | undefined, val: number }> = { // mu
   faceSize: { status: false, val: 0 },
   antispoofCheck: { status: false, val: 0 },
   livenessCheck: { status: false, val: 0 },
+  distance: { status: false, val: 0 },
   age: { status: false, val: 0 },
   gender: { status: false, val: 0 },
   timeout: { status: true, val: 0 },
@@ -74,6 +77,7 @@ const allOk = () => ok.faceCount.status
   && ok.faceConfidence.status
   && ok.antispoofCheck.status
   && ok.livenessCheck.status
+  && ok.distance.status
   && ok.descriptor.status
   && ok.age.status
   && ok.gender.status;
@@ -188,6 +192,8 @@ async function validationLoop(): Promise<H.FaceResult> { // main screen refresh
   ok.livenessCheck.status = ok.livenessCheck.val >= options.minConfidence;
   ok.faceSize.val = Math.min(human.result.face[0].box[2], human.result.face[0].box[3]);
   ok.faceSize.status = ok.faceSize.val >= options.minSize;
+  ok.distance.val = human.result.face[0].distance || 0;
+  ok.distance.status = (ok.distance.val >= options.distanceMin) && (ok.distance.val <= options.distanceMax);
   ok.descriptor.val = human.result.face[0].embedding?.length || 0;
   ok.descriptor.status = ok.descriptor.val > 0;
   ok.age.val = human.result.face[0].age || 0;
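The two lines added above gate the face on the new distance estimate. The same check can be written as a small standalone helper, as in the sketch below; the default thresholds and the meters unit mirror the `distanceMin`/`distanceMax` options above and are otherwise assumptions of this sketch.

```ts
type DistanceStatus = 'ok' | 'too close' | 'too far' | 'unknown';

// Sketch of a reusable gate equivalent to the ok.distance check above.
function checkDistance(face: { distance?: number }, min = 0.4, max = 1.0): DistanceStatus {
  if (!face.distance) return 'unknown';        // no estimate available
  if (face.distance < min) return 'too close';
  if (face.distance > max) return 'too far';
  return 'ok';
}
```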
@@ -234,7 +240,7 @@ async function detectFace() {
   dom.canvas.getContext('2d')?.clearRect(0, 0, options.minSize, options.minSize);
   if (!current?.face?.tensor || !current?.face?.embedding) return false;
   console.log('face record:', current.face); // eslint-disable-line no-console
-  log(`detected face: ${current.face.gender} ${current.face.age || 0}y distance ${current.face.iris || 0}cm/${Math.round(100 * (current.face.iris || 0) / 2.54) / 100}in`);
+  log(`detected face: ${current.face.gender} ${current.face.age || 0}y distance ${100 * (current.face.distance || 0)}cm/${Math.round(100 * (current.face.distance || 0) / 2.54)}in`);
   await human.tf.browser.toPixels(current.face.tensor, dom.canvas);
   if (await indexDb.count() === 0) {
     log('face database is empty: nothing to compare face with');
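The updated log line converts the raw value to centimeters and inches. As a small worked example (assuming meters as the input unit, which matches the 100x factor above):

```ts
const distance = 0.6;                              // example estimate
const cm = 100 * distance;                         // 60 cm
const inches = Math.round(100 * distance / 2.54);  // round(23.62...) = 24 in
console.log(`distance ${cm}cm/${inches}in`);       // "distance 60cm/24in"
```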
@@ -78,7 +78,7 @@ async function main() {
   const persons = human.result.persons;
   for (let i = 0; i < persons.length; i++) {
     const face = persons[i].face;
-    const faceTxt = face ? `score:${face.score} age:${face.age} gender:${face.gender} iris:${face.iris}` : null;
+    const faceTxt = face ? `score:${face.score} age:${face.age} gender:${face.gender} iris:${face.distance}` : null;
     const body = persons[i].body;
     const bodyTxt = body ? `score:${body.score} keypoints:${body.keypoints?.length}` : null;
     log.data(` #${i}: Face:${faceTxt} Body:${bodyTxt} LeftHand:${persons[i].hands.left ? 'yes' : 'no'} RightHand:${persons[i].hands.right ? 'yes' : 'no'} Gestures:${persons[i].gestures.length}`);
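The loop above walks the aggregated `human.result.persons` records. A compact sketch of the same traversal (field access only, no log formatting) is shown below; the optional chaining is defensive and not required by the library's typings.

```ts
for (const person of human.result.persons) {
  console.log({
    distance: person.face?.distance,                 // formerly person.face.iris
    bodyKeypoints: person.body?.keypoints?.length,
    leftHand: Boolean(person.hands.left),
    rightHand: Boolean(person.hands.right),
    gestures: person.gestures.length,
  });
}
```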
@@ -105,7 +105,7 @@ async function detect(input) {
     for (let i = 0; i < result.face.length; i++) {
       const face = result.face[i];
       const emotion = face.emotion.reduce((prev, curr) => (prev.score > curr.score ? prev : curr));
-      log.data(`  Face: #${i} boxScore:${face.boxScore} faceScore:${face.faceScore} age:${face.age} genderScore:${face.genderScore} gender:${face.gender} emotionScore:${emotion.score} emotion:${emotion.emotion} iris:${face.iris}`);
+      log.data(`  Face: #${i} boxScore:${face.boxScore} faceScore:${face.faceScore} age:${face.age} genderScore:${face.genderScore} gender:${face.gender} emotionScore:${emotion.score} emotion:${emotion.emotion} distance:${face.distance}`);
     }
   } else {
     log.data('  Face: N/A');
@@ -4,6 +4,100 @@
   author: <https://github.com/vladmandic>'
 */

-import*as m from"../../dist/human.esm.js";var f=1920,b={modelBasePath:"../../models",filter:{enabled:!0,equalization:!1,flip:!1,width:f},face:{enabled:!0,detector:{rotation:!0},mesh:{enabled:!0},attention:{enabled:!1},iris:{enabled:!0},description:{enabled:!0},emotion:{enabled:!0},antispoof:{enabled:!0},liveness:{enabled:!0}},body:{enabled:!0},hand:{enabled:!1},object:{enabled:!1},segmentation:{enabled:!1},gesture:{enabled:!0}},e=new m.Human(b);e.env.perfadd=!1;e.draw.options.font='small-caps 18px "Lato"';e.draw.options.lineHeight=20;var a={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("status"),perf:document.getElementById("performance")},n={detect:0,draw:0,tensors:0,start:0},s={detectFPS:0,drawFPS:0,frames:0,averageMs:0},o=(...t)=>{a.log.innerText+=t.join(" ")+`
-`,console.log(...t)},r=t=>a.fps.innerText=t,g=t=>a.perf.innerText="tensors:"+e.tf.memory().numTensors.toString()+" | performance: "+JSON.stringify(t).replace(/"|{|}/g,"").replace(/,/g," | ");async function u(){if(!a.video.paused){n.start===0&&(n.start=e.now()),await e.detect(a.video);let t=e.tf.memory().numTensors;t-n.tensors!==0&&o("allocated tensors:",t-n.tensors),n.tensors=t,s.detectFPS=Math.round(1e3*1e3/(e.now()-n.detect))/1e3,s.frames++,s.averageMs=Math.round(1e3*(e.now()-n.start)/s.frames)/1e3,s.frames%100===0&&!a.video.paused&&o("performance",{...s,tensors:n.tensors})}n.detect=e.now(),requestAnimationFrame(u)}async function p(){var d,i,c;if(!a.video.paused){let l=e.next(e.result),w=await e.image(a.video);e.draw.canvas(w.canvas,a.canvas);let v={bodyLabels:`person confidence [score] and ${(c=(i=(d=e.result)==null?void 0:d.body)==null?void 0:i[0])==null?void 0:c.keypoints.length} keypoints`};await e.draw.all(a.canvas,l,v),g(l.performance)}let t=e.now();s.drawFPS=Math.round(1e3*1e3/(t-n.draw))/1e3,n.draw=t,r(a.video.paused?"paused":`fps: ${s.detectFPS.toFixed(1).padStart(5," ")} detect | ${s.drawFPS.toFixed(1).padStart(5," ")} draw`),setTimeout(p,30)}async function h(){let d=(await e.webcam.enumerate())[0].deviceId;await e.webcam.start({element:a.video,crop:!0,width:f,id:d}),a.canvas.width=e.webcam.width,a.canvas.height=e.webcam.height,a.canvas.onclick=async()=>{e.webcam.paused?await e.webcam.play():e.webcam.pause()}}async function y(){o("human version:",e.version,"| tfjs version:",e.tf.version["tfjs-core"]),o("platform:",e.env.platform,"| agent:",e.env.agent),r("loading..."),await e.load(),o("backend:",e.tf.getBackend(),"| available:",e.env.backends),o("models stats:",e.getModelStats()),o("models loaded:",Object.values(e.models).filter(t=>t!==null).length),o("environment",e.env),r("initializing..."),await e.warmup(),await h(),await u(),await p()}window.onload=y;
+// demo/typescript/index.ts
+import * as H from "../../dist/human.esm.js";
+var width = 1920;
+var humanConfig = {
+  modelBasePath: "../../models",
+  filter: { enabled: true, equalization: false, flip: false, width },
+  face: { enabled: true, detector: { rotation: true }, mesh: { enabled: true }, attention: { enabled: false }, iris: { enabled: true }, description: { enabled: true }, emotion: { enabled: true }, antispoof: { enabled: true }, liveness: { enabled: true } },
+  body: { enabled: true },
+  hand: { enabled: false },
+  object: { enabled: false },
+  segmentation: { enabled: false },
+  gesture: { enabled: true }
+};
+var human = new H.Human(humanConfig);
+human.env.perfadd = false;
+human.draw.options.font = 'small-caps 18px "Lato"';
+human.draw.options.lineHeight = 20;
+var dom = {
+  video: document.getElementById("video"),
+  canvas: document.getElementById("canvas"),
+  log: document.getElementById("log"),
+  fps: document.getElementById("status"),
+  perf: document.getElementById("performance")
+};
+var timestamp = { detect: 0, draw: 0, tensors: 0, start: 0 };
+var fps = { detectFPS: 0, drawFPS: 0, frames: 0, averageMs: 0 };
+var log = (...msg) => {
+  dom.log.innerText += msg.join(" ") + "\n";
+  console.log(...msg);
+};
+var status = (msg) => dom.fps.innerText = msg;
+var perf = (msg) => dom.perf.innerText = "tensors:" + human.tf.memory().numTensors.toString() + " | performance: " + JSON.stringify(msg).replace(/"|{|}/g, "").replace(/,/g, " | ");
+async function detectionLoop() {
+  if (!dom.video.paused) {
+    if (timestamp.start === 0)
+      timestamp.start = human.now();
+    await human.detect(dom.video);
+    const tensors = human.tf.memory().numTensors;
+    if (tensors - timestamp.tensors !== 0)
+      log("allocated tensors:", tensors - timestamp.tensors);
+    timestamp.tensors = tensors;
+    fps.detectFPS = Math.round(1e3 * 1e3 / (human.now() - timestamp.detect)) / 1e3;
+    fps.frames++;
+    fps.averageMs = Math.round(1e3 * (human.now() - timestamp.start) / fps.frames) / 1e3;
+    if (fps.frames % 100 === 0 && !dom.video.paused)
+      log("performance", { ...fps, tensors: timestamp.tensors });
+  }
+  timestamp.detect = human.now();
+  requestAnimationFrame(detectionLoop);
+}
+async function drawLoop() {
+  var _a, _b, _c;
+  if (!dom.video.paused) {
+    const interpolated = human.next(human.result);
+    const processed = await human.image(dom.video);
+    human.draw.canvas(processed.canvas, dom.canvas);
+    const opt = { bodyLabels: `person confidence [score] and ${(_c = (_b = (_a = human.result) == null ? void 0 : _a.body) == null ? void 0 : _b[0]) == null ? void 0 : _c.keypoints.length} keypoints` };
+    await human.draw.all(dom.canvas, interpolated, opt);
+    perf(interpolated.performance);
+  }
+  const now = human.now();
+  fps.drawFPS = Math.round(1e3 * 1e3 / (now - timestamp.draw)) / 1e3;
+  timestamp.draw = now;
+  status(dom.video.paused ? "paused" : `fps: ${fps.detectFPS.toFixed(1).padStart(5, " ")} detect | ${fps.drawFPS.toFixed(1).padStart(5, " ")} draw`);
+  setTimeout(drawLoop, 30);
+}
+async function webCam() {
+  const devices = await human.webcam.enumerate();
+  const id = devices[0].deviceId;
+  await human.webcam.start({ element: dom.video, crop: true, width, id });
+  dom.canvas.width = human.webcam.width;
+  dom.canvas.height = human.webcam.height;
+  dom.canvas.onclick = async () => {
+    if (human.webcam.paused)
+      await human.webcam.play();
+    else
+      human.webcam.pause();
+  };
+}
+async function main() {
+  log("human version:", human.version, "| tfjs version:", human.tf.version["tfjs-core"]);
+  log("platform:", human.env.platform, "| agent:", human.env.agent);
+  status("loading...");
+  await human.load();
+  log("backend:", human.tf.getBackend(), "| available:", human.env.backends);
+  log("models stats:", human.getModelStats());
+  log("models loaded:", Object.values(human.models).filter((model) => model !== null).length);
+  log("environment", human.env);
+  status("initializing...");
+  await human.warmup();
+  await webCam();
+  await detectionLoop();
+  await drawLoop();
+}
+window.onload = main;
 //# sourceMappingURL=index.js.map
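The rebuilt demo above simply starts the first enumerated camera (`devices[0].deviceId`). Selecting a specific device, per the "added webcam id specification" changelog entry, could look like the sketch below, assuming a `human` instance as in the demo; matching on a `label` property is an assumption about the records returned by `enumerate()`, with a fallback to the first device.

```ts
async function startPreferredCamera(video: HTMLVideoElement, preferred = 'back') {
  const devices = await human.webcam.enumerate();
  const match = devices.find((d) => d.label?.toLowerCase().includes(preferred));
  const id = (match || devices[0]).deviceId;                    // fall back to the first camera
  await human.webcam.start({ element: video, crop: true, width: 1920, id });
}
```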
File diff suppressed because one or more lines are too long
@@ -15,7 +15,7 @@ function drawLabels(f: FaceResult, ctx: CanvasRenderingContext2D | OffscreenCanv
   if (f.gender) l = replace(l, '[gender]', f.gender);
   if (f.genderScore) l = replace(l, '[genderScore]', 100 * f.genderScore);
   if (f.age) l = replace(l, '[age]', f.age);
-  if (f.iris) l = replace(l, '[distance]', 100 * f.iris);
+  if (f.distance) l = replace(l, '[distance]', 100 * f.distance);
   if (f.real) l = replace(l, '[real]', 100 * f.real);
   if (f.live) l = replace(l, '[live]', 100 * f.live);
   if (f.emotion && f.emotion.length > 0) {
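`drawLabels` substitutes placeholders such as `[age]`, `[gender]` and `[distance]` into a label template. A standalone sketch of that substitution is shown below; the template string is illustrative rather than the library's actual default, and the 100x factor mirrors the meters-to-centimeters conversion above.

```ts
const template = 'gender: [gender] age: [age] distance: [distance]cm'; // illustrative template

function fillLabel(face: { gender?: string, age?: number, distance?: number }): string {
  let label = template;
  if (face.gender) label = label.replace('[gender]', face.gender);
  if (face.age) label = label.replace('[age]', `${face.age}`);
  if (face.distance) label = label.replace('[distance]', `${Math.round(100 * face.distance)}`);
  return label;
}
```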
@@ -216,7 +216,7 @@ export const detectFace = async (instance: Human /* instance of human */, input:
   if (emotionRes) res.emotion = emotionRes as { score: number, emotion: Emotion }[];
   if (antispoofRes) res.real = antispoofRes as number;
   if (livenessRes) res.live = livenessRes as number;
-  if (irisSize > 0) res.iris = irisSize;
+  if (irisSize > 0) res.distance = irisSize;
   if (rotation) res.rotation = rotation;
   if (tensor) res.tensor = tensor;
   faceRes.push(res);
src/human.ts (12 changed lines)

@@ -41,7 +41,7 @@ import * as selfie from './segmentation/selfie';
 import * as warmups from './warmup';

 // type definitions
-import type { Input, DrawOptions, Config, Result, FaceResult, HandResult, BodyResult, ObjectResult, GestureResult, PersonResult, AnyCanvas } from './exports';
+import { Input, DrawOptions, Config, Result, FaceResult, HandResult, BodyResult, ObjectResult, GestureResult, PersonResult, AnyCanvas, emptyResult } from './exports';
 import type { Tensor, Tensor4D } from './tfjs/types';
 // type exports
 export * from './exports';

@@ -167,7 +167,7 @@ export class Human {
     person: (output: AnyCanvas, result: PersonResult[], options?: Partial<DrawOptions>) => draw.person(output, result, options),
     all: (output: AnyCanvas, result: Result, options?: Partial<DrawOptions>) => draw.all(output, result, options),
   };
-  this.result = { face: [], body: [], hand: [], gesture: [], object: [], performance: {}, timestamp: 0, persons: [], error: null };
+  this.result = emptyResult();
   // export access to image processing
   this.process = { tensor: null, canvas: null };
   // export raw access to underlying models

@@ -428,7 +428,7 @@ export class Human {
   if (error) {
     log(error, input);
     this.emit('error');
-    resolve({ face: [], body: [], hand: [], gesture: [], object: [], performance: this.performance, timestamp: now(), persons: [], error });
+    resolve(emptyResult(error));
   }

   const timeStart = now();

@@ -446,7 +446,7 @@ export class Human {
   if (!img.tensor) {
     if (this.config.debug) log('could not convert input to tensor');
     this.emit('error');
-    resolve({ face: [], body: [], hand: [], gesture: [], object: [], performance: this.performance, timestamp: now(), persons: [], error: 'could not convert input to tensor' });
+    resolve(emptyResult('could not convert input to tensor'));
     return;
   }
   this.emit('image');

@@ -547,7 +547,7 @@ export class Human {
   }

   this.performance.total = this.env.perfadd ? (this.performance.total || 0) + Math.trunc(now() - timeStart) : Math.trunc(now() - timeStart);
-  const shape = this.process.tensor?.shape || [];
+  const shape = this.process.tensor?.shape || [0, 0, 0, 0];
   this.result = {
     face: faceRes as FaceResult[],
     body: bodyRes as BodyResult[],

@@ -558,6 +558,8 @@ export class Human {
     canvas: this.process.canvas,
     timestamp: Date.now(),
     error: null,
+    width: shape[2],
+    height: shape[1],
     get persons() { return persons.join(faceRes as FaceResult[], bodyRes as BodyResult[], handRes as HandResult[], gestureRes, shape); },
   };

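The new `width`/`height` fields are read off the processed input tensor, which is laid out as NHWC (`[batch, height, width, channels]`); hence `shape[2]` is the width and `shape[1]` the height, with `[0, 0, 0, 0]` as the fallback when no tensor is available. A tiny illustration:

```ts
const shape = [1, 720, 1280, 3]; // example NHWC shape for a 1280x720 input
const width = shape[2];          // 1280
const height = shape[1];         // 720
// with the [0, 0, 0, 0] fallback above, width and height simply report 0
```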
@@ -59,8 +59,8 @@ export interface FaceResult {
   race?: { score: number, race: Race }[],
   /** face descriptor */
   embedding?: number[],
-  /** face iris distance from camera */
-  iris?: number,
+  /** face distance from camera */
+  distance?: number,
   /** face anti-spoofing result confidence */
   real?: number,
   /** face liveness result confidence */

@@ -225,4 +225,10 @@ export interface Result {
   persons: PersonResult[],
   /** Last known error message */
   error: string | null;
+  /** Resolution width */
+  width: number,
+  /** Resolution height */
+  height: number,
 }
+
+export const emptyResult = (error: string | null = null): Result => ({ face: [], body: [], hand: [], gesture: [], object: [], persons: [], performance: {}, timestamp: 0, width: 0, height: 0, error });
@@ -2,7 +2,7 @@
  * Results interpolation for smoothing of video detection results in between detected frames
  */

-import type { Result, FaceResult, BodyResult, HandResult, ObjectResult, PersonResult, Box, Point, BodyLandmark, BodyAnnotation } from '../result';
+import { Result, FaceResult, BodyResult, HandResult, ObjectResult, PersonResult, Box, Point, BodyLandmark, BodyAnnotation, emptyResult } from '../result';
 import type { Config } from '../config';

 import * as moveNetCoords from '../body/movenetcoords';

@@ -11,12 +11,12 @@ import * as efficientPoseCoords from '../body/efficientposecoords';
 import { now } from './util';
 import { env } from './env';

-const bufferedResult: Result = { face: [], body: [], hand: [], gesture: [], object: [], persons: [], performance: {}, timestamp: 0, error: null };
+const bufferedResult: Result = emptyResult();
 let interpolateTime = 0;

 export function calc(newResult: Result, config: Config): Result {
   const t0 = now();
-  if (!newResult) return { face: [], body: [], hand: [], gesture: [], object: [], persons: [], performance: {}, timestamp: 0, error: null };
+  if (!newResult) return emptyResult();
   // each record is only updated using deep clone when number of detected record changes, otherwise it will converge by itself
   // otherwise bufferedResult is a shallow clone of result plus updated local calculated values
   // thus mixing by-reference and by-value assignments to minimize memory operations

@@ -175,9 +175,13 @@ export function calc(newResult: Result, config: Config): Result {
     }
   }

-  // just copy latest gestures without interpolation
+  // copy latest gestures without interpolation
   if (newResult.gesture) bufferedResult.gesture = newResult.gesture;

+  // copy resolution info
+  bufferedResult.width = newResult.width;
+  bufferedResult.height = newResult.height;
+
   // append interpolation performance data
   const t1 = now();
   interpolateTime = env.perfadd ? interpolateTime + Math.round(t1 - t0) : Math.round(t1 - t0);
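For readers unfamiliar with this module: it blends each buffered value toward the newest detection so that drawing between detection frames stays smooth. The sketch below shows the general idea only and is not the library's actual code; the real implementation interpolates boxes, keypoints and annotations and manages cloning as described in the comments above.

```ts
type Box = [number, number, number, number];

// Move a buffered box a fraction of the way toward the latest detection (illustrative factor).
function interpolateBox(buffered: Box, latest: Box, factor = 0.25): Box {
  return buffered.map((v, i) => v + factor * (latest[i] - v)) as Box;
}

// usage: smoothedBox = interpolateBox(smoothedBox, newResult.face[0].box);
```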
@@ -9,7 +9,7 @@ import * as image from './image/image';
 import * as backend from './tfjs/backend';
 import { env } from './util/env';
 import type { Config } from './config';
-import type { Result } from './result';
+import { emptyResult, Result } from './result';
 import { Human, models } from './human';
 import type { Tensor, DataType } from './tfjs/types';


@@ -158,7 +158,7 @@ export async function warmup(instance: Human, userConfig?: Partial<Config>): Pro
   instance.state = 'warmup';
   if (userConfig) instance.config = mergeDeep(instance.config, userConfig) as Config;
   if (!instance.config.warmup || instance.config.warmup.length === 0 || instance.config.warmup === 'none') {
-    return { face: [], body: [], hand: [], gesture: [], object: [], performance: instance.performance, timestamp: now(), persons: [], error: null };
+    return emptyResult();
   }
   return new Promise(async (resolve) => {
     await models.load(instance);
test/build.log (155 changed lines)

@@ -1,50 +1,105 @@
-2022-11-16 17:46:46 DATA: Build {"name":"@vladmandic/human","version":"3.0.0"}
+2022-11-17 10:11:23 DATA: Build {"name":"@vladmandic/human","version":"3.0.0"}
-2022-11-16 17:46:46 INFO: Application: {"name":"@vladmandic/human","version":"3.0.0"}
+2022-11-17 10:11:23 INFO: Application: {"name":"@vladmandic/human","version":"3.0.0"}
-2022-11-16 17:46:46 INFO: Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
+2022-11-17 10:11:23 INFO: Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
-2022-11-16 17:46:46 INFO: Toolchain: {"build":"0.7.14","esbuild":"0.15.14","typescript":"4.9.3","typedoc":"0.23.21","eslint":"8.27.0"}
+2022-11-17 10:11:23 INFO: Toolchain: {"build":"0.7.14","esbuild":"0.15.14","typescript":"4.9.3","typedoc":"0.23.21","eslint":"8.27.0"}
-2022-11-16 17:46:46 INFO: Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
+2022-11-17 10:11:23 INFO: Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
-2022-11-16 17:46:46 STATE: Clean: {"locations":["dist/*","types/*","typedoc/*"]}
+2022-11-17 10:11:23 STATE: Clean: {"locations":["dist/*","types/*","typedoc/*"]}
-2022-11-16 17:46:46 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1289,"outputBytes":361}
+2022-11-17 10:11:23 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1289,"outputBytes":361}
-2022-11-16 17:46:46 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":569,"outputBytes":924}
+2022-11-17 10:11:23 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":569,"outputBytes":924}
-2022-11-16 17:46:46 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":80,"inputBytes":673738,"outputBytes":317961}
+2022-11-17 10:11:23 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":80,"inputBytes":673583,"outputBytes":317615}
-2022-11-16 17:46:46 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":577,"outputBytes":928}
+2022-11-17 10:11:23 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":577,"outputBytes":928}
-2022-11-16 17:46:46 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":80,"inputBytes":673742,"outputBytes":317965}
+2022-11-17 10:11:23 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":80,"inputBytes":673587,"outputBytes":317619}
-2022-11-16 17:46:46 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":665,"outputBytes":1876}
+2022-11-17 10:11:23 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":665,"outputBytes":1876}
-2022-11-16 17:46:46 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":80,"inputBytes":674690,"outputBytes":318076}
+2022-11-17 10:11:23 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":80,"inputBytes":674535,"outputBytes":317730}
-2022-11-16 17:46:46 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1375,"outputBytes":670}
+2022-11-17 10:11:23 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1375,"outputBytes":670}
-2022-11-16 17:46:46 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":80,"inputBytes":673484,"outputBytes":316540}
+2022-11-17 10:11:23 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":80,"inputBytes":673329,"outputBytes":316181}
-2022-11-16 17:46:46 STATE: Compile: {"name":"tfjs/browser/esm/bundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":10,"inputBytes":1375,"outputBytes":1144900}
+2022-11-17 10:11:23 STATE: Compile: {"name":"tfjs/browser/esm/bundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":10,"inputBytes":1375,"outputBytes":1144900}
-2022-11-16 17:46:46 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":80,"inputBytes":1817714,"outputBytes":1458000}
+2022-11-17 10:11:23 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":80,"inputBytes":1817559,"outputBytes":1457643}
-2022-11-16 17:46:46 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":80,"inputBytes":1817714,"outputBytes":1917498}
+2022-11-17 10:11:23 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":80,"inputBytes":1817559,"outputBytes":1917209}
-2022-11-16 17:46:50 STATE: Typings: {"input":"src/human.ts","output":"types/lib","files":15}
+2022-11-17 10:11:27 STATE: Typings: {"input":"src/human.ts","output":"types/lib","files":15}
-2022-11-16 17:46:52 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":76,"generated":true}
+2022-11-17 10:11:29 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":77,"generated":true}
-2022-11-16 17:46:52 STATE: Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":6136,"outputBytes":2914}
+2022-11-17 10:11:29 STATE: Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":6136,"outputBytes":2914}
-2022-11-16 17:46:52 STATE: Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":17166,"outputBytes":9243}
+2022-11-17 10:11:29 STATE: Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":17174,"outputBytes":9251}
-2022-11-16 17:47:01 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":114,"errors":0,"warnings":0}
+2022-11-17 10:11:37 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":114,"errors":0,"warnings":0}
-2022-11-16 17:47:01 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
+2022-11-17 10:11:37 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
-2022-11-16 17:47:01 STATE: Copy: {"input":"node_modules/@vladmandic/tfjs/types/tfjs-core.d.ts","output":"types/tfjs-core.d.ts"}
+2022-11-17 10:11:37 STATE: Copy: {"input":"node_modules/@vladmandic/tfjs/types/tfjs-core.d.ts","output":"types/tfjs-core.d.ts"}
-2022-11-16 17:47:01 INFO: Done...
+2022-11-17 10:11:37 INFO: Done...
-2022-11-16 17:47:01 STATE: Copy: {"input":"node_modules/@vladmandic/tfjs/types/tfjs.d.ts","output":"types/tfjs.esm.d.ts"}
+2022-11-17 10:11:37 STATE: Copy: {"input":"node_modules/@vladmandic/tfjs/types/tfjs.d.ts","output":"types/tfjs.esm.d.ts"}
-2022-11-16 17:47:01 STATE: Copy: {"input":"src/types/tsconfig.json","output":"types/tsconfig.json"}
+2022-11-17 10:11:37 STATE: Copy: {"input":"src/types/tsconfig.json","output":"types/tsconfig.json"}
-2022-11-16 17:47:01 STATE: Copy: {"input":"src/types/eslint.json","output":"types/.eslintrc.json"}
+2022-11-17 10:11:37 STATE: Copy: {"input":"src/types/eslint.json","output":"types/.eslintrc.json"}
-2022-11-16 17:47:01 STATE: Copy: {"input":"src/types/tfjs.esm.d.ts","output":"dist/tfjs.esm.d.ts"}
+2022-11-17 10:11:37 STATE: Copy: {"input":"src/types/tfjs.esm.d.ts","output":"dist/tfjs.esm.d.ts"}
-2022-11-16 17:47:01 STATE: Filter: {"input":"types/tfjs-core.d.ts"}
+2022-11-17 10:11:37 STATE: Filter: {"input":"types/tfjs-core.d.ts"}
-2022-11-16 17:47:02 STATE: API-Extractor: {"succeeeded":true,"errors":0,"warnings":195}
+2022-11-17 10:11:38 STATE: API-Extractor: {"succeeeded":true,"errors":0,"warnings":195}
-2022-11-16 17:47:02 STATE: Filter: {"input":"types/human.d.ts"}
+2022-11-17 10:11:38 STATE: Filter: {"input":"types/human.d.ts"}
-2022-11-16 17:47:02 STATE: Write: {"output":"dist/human.esm-nobundle.d.ts"}
+2022-11-17 10:11:38 STATE: Write: {"output":"dist/human.esm-nobundle.d.ts"}
-2022-11-16 17:47:02 STATE: Write: {"output":"dist/human.esm.d.ts"}
+2022-11-17 10:11:38 STATE: Write: {"output":"dist/human.esm.d.ts"}
-2022-11-16 17:47:02 STATE: Write: {"output":"dist/human.d.ts"}
+2022-11-17 10:11:38 STATE: Write: {"output":"dist/human.d.ts"}
-2022-11-16 17:47:02 STATE: Write: {"output":"dist/human.node-gpu.d.ts"}
+2022-11-17 10:11:38 STATE: Write: {"output":"dist/human.node-gpu.d.ts"}
-2022-11-16 17:47:02 STATE: Write: {"output":"dist/human.node.d.ts"}
+2022-11-17 10:11:38 STATE: Write: {"output":"dist/human.node.d.ts"}
-2022-11-16 17:47:02 STATE: Write: {"output":"dist/human.node-wasm.d.ts"}
+2022-11-17 10:11:38 STATE: Write: {"output":"dist/human.node-wasm.d.ts"}
-2022-11-16 17:47:02 INFO: Analyze models: {"folders":8,"result":"models/models.json"}
+2022-11-17 10:11:38 INFO: Analyze models: {"folders":8,"result":"models/models.json"}
-2022-11-16 17:47:02 STATE: Models {"folder":"./models","models":12}
+2022-11-17 10:11:38 STATE: Models {"folder":"./models","models":12}
-2022-11-16 17:47:02 STATE: Models {"folder":"../human-models/models","models":43}
+2022-11-17 10:11:38 STATE: Models {"folder":"../human-models/models","models":43}
-2022-11-16 17:47:02 STATE: Models {"folder":"../blazepose/model/","models":4}
+2022-11-17 10:11:38 STATE: Models {"folder":"../blazepose/model/","models":4}
-2022-11-16 17:47:02 STATE: Models {"folder":"../anti-spoofing/model","models":1}
+2022-11-17 10:11:38 STATE: Models {"folder":"../anti-spoofing/model","models":1}
-2022-11-16 17:47:02 STATE: Models {"folder":"../efficientpose/models","models":3}
+2022-11-17 10:11:38 STATE: Models {"folder":"../efficientpose/models","models":3}
-2022-11-16 17:47:02 STATE: Models {"folder":"../insightface/models","models":5}
+2022-11-17 10:11:38 STATE: Models {"folder":"../insightface/models","models":5}
-2022-11-16 17:47:02 STATE: Models {"folder":"../movenet/models","models":3}
+2022-11-17 10:11:38 STATE: Models {"folder":"../movenet/models","models":3}
-2022-11-16 17:47:02 STATE: Models {"folder":"../nanodet/models","models":4}
+2022-11-17 10:11:38 STATE: Models {"folder":"../nanodet/models","models":4}
-2022-11-16 17:47:02 STATE: Models: {"count":58,"totalSize":386543911}
+2022-11-17 10:11:39 STATE: Models: {"count":58,"totalSize":386543911}
-2022-11-16 17:47:02 INFO: Human Build complete... {"logFile":"test/build.log"}
+2022-11-17 10:11:39 INFO: Human Build complete... {"logFile":"test/build.log"}
+2022-11-17 10:16:08 INFO: @vladmandic/human version 3.0.0
+2022-11-17 10:16:08 INFO: User: vlado Platform: linux Arch: x64 Node: v19.1.0
+2022-11-17 10:16:08 INFO: Application: {"name":"@vladmandic/human","version":"3.0.0"}
+2022-11-17 10:16:08 INFO: Environment: {"profile":"development","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
+2022-11-17 10:16:08 INFO: Toolchain: {"build":"0.7.14","esbuild":"0.15.14","typescript":"4.9.3","typedoc":"0.23.21","eslint":"8.27.0"}
+2022-11-17 10:16:08 INFO: Build: {"profile":"development","steps":["serve","watch","compile"]}
+2022-11-17 10:16:08 STATE: WebServer: {"ssl":false,"port":8000,"root":"."}
+2022-11-17 10:16:08 STATE: WebServer: {"ssl":true,"port":8001,"root":".","sslKey":"node_modules/@vladmandic/build/cert/https.key","sslCrt":"node_modules/@vladmandic/build/cert/https.crt"}
+2022-11-17 10:16:08 STATE: Watch: {"locations":["src/**/*","tfjs/**/*","demo/**/*.ts"]}
+2022-11-17 10:16:08 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1289,"outputBytes":1357}
+2022-11-17 10:16:08 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1565,"outputBytes":1786}
+2022-11-17 10:16:08 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":80,"inputBytes":674445,"outputBytes":507569}
+2022-11-17 10:16:08 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1573,"outputBytes":1810}
+2022-11-17 10:16:08 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":80,"inputBytes":674469,"outputBytes":507589}
+2022-11-17 10:16:08 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1661,"outputBytes":1992}
+2022-11-17 10:16:08 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":80,"inputBytes":674651,"outputBytes":507780}
+2022-11-17 10:16:08 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2371,"outputBytes":923}
+2022-11-17 10:16:08 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":80,"inputBytes":673582,"outputBytes":510177}
+2022-11-17 10:16:09 STATE: Compile: {"name":"tfjs/browser/esm/bundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":10,"inputBytes":2371,"outputBytes":1144900}
+2022-11-17 10:16:09 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":80,"inputBytes":1817559,"outputBytes":1457643}
+2022-11-17 10:16:09 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":80,"inputBytes":1817559,"outputBytes":1917209}
+2022-11-17 10:16:09 STATE: Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":6136,"outputBytes":4208}
+2022-11-17 10:16:09 STATE: Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":17567,"outputBytes":13914}
+2022-11-17 10:16:09 INFO: Listening...
+2022-11-17 10:16:20 DATA: HTTPS: {"method":"GET","ver":"2.0","status":301,"url":"/demo/typescript","redirect":"/demo/typescript/index.html","remote":"::1"}
+2022-11-17 10:16:20 DATA: HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"text/html","size":1953,"url":"/demo/typescript/index.html","remote":"::1"}
+2022-11-17 10:16:20 DATA: HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"text/javascript","size":4208,"url":"/demo/typescript/index.js","remote":"::1"}
+2022-11-17 10:16:22 DATA: HTTPS: {"method":"GET","ver":"2.0","status":301,"url":"/demo/faceid","redirect":"/demo/faceid/index.html","remote":"::1"}
+2022-11-17 10:16:22 DATA: HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"text/html","size":3415,"url":"/demo/faceid/index.html","remote":"::1"}
+2022-11-17 10:16:22 DATA: HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"text/javascript","size":13914,"url":"/demo/faceid/index.js","remote":"::1"}
+2022-11-17 10:16:22 DATA: HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"text/javascript","size":1917209,"url":"/dist/human.esm.js","remote":"::1"}
+2022-11-17 10:16:22 DATA: HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"font/woff2","size":181500,"url":"/assets/lato-light.woff2","remote":"::1"}
+2022-11-17 10:16:22 DATA: HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"application/manifest+json","size":304,"url":"/demo/manifest.webmanifest","remote":"::1"}
+2022-11-17 10:16:22 DATA: HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"image/png","size":142790,"url":"/assets/icon.png","remote":"::1"}
+2022-11-17 10:16:23 DATA: HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"application/manifest+json","size":304,"url":"/demo/manifest.webmanifest","remote":"::1"}
+2022-11-17 10:16:25 DATA: HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"application/octet-stream","size":28470,"url":"/demo/faceid/index.js.map","remote":"::1"}
+2022-11-17 10:16:25 DATA: HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"application/octet-stream","size":3385692,"url":"/dist/human.esm.js.map","remote":"::1"}
+2022-11-17 10:16:39 DATA: HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"text/html","size":3415,"url":"/demo/faceid/index.html","remote":"::1"}
+2022-11-17 10:16:39 DATA: HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"text/javascript","size":13914,"url":"/demo/faceid/index.js","remote":"::1"}
+2022-11-17 10:16:39 DATA: HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"font/woff2","size":181500,"url":"/assets/lato-light.woff2","remote":"::1"}
+2022-11-17 10:16:39 DATA: HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"text/javascript","size":1917209,"url":"/dist/human.esm.js","remote":"::1"}
+2022-11-17 10:16:39 DATA: HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"application/octet-stream","size":28470,"url":"/demo/faceid/index.js.map","remote":"::1"}
+2022-11-17 10:16:39 DATA: HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"application/octet-stream","size":3385692,"url":"/dist/human.esm.js.map","remote":"::1"}
+2022-11-17 10:16:39 DATA: HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"image/x-icon","size":261950,"url":"/favicon.ico","remote":"::1"}
+2022-11-17 10:16:39 DATA: HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"application/manifest+json","size":304,"url":"/demo/manifest.webmanifest","remote":"::1"}
+2022-11-17 10:16:39 DATA: HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"image/png","size":142790,"url":"/assets/icon.png","remote":"::1"}
+2022-11-17 10:17:25 DATA: HTTPS: {"method":"GET","ver":"2.0","status":301,"url":"/demo/typescript","redirect":"/demo/typescript/index.html","remote":"::1"}
+2022-11-17 10:17:25 DATA: HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"text/html","size":1953,"url":"/demo/typescript/index.html","remote":"::1"}
+2022-11-17 10:17:25 DATA: HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"text/javascript","size":4208,"url":"/demo/typescript/index.js","remote":"::1"}
+2022-11-17 10:17:25 DATA: HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"font/woff2","size":181500,"url":"/assets/lato-light.woff2","remote":"::1"}
+2022-11-17 10:17:25 DATA: HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"text/javascript","size":1917209,"url":"/dist/human.esm.js","remote":"::1"}
+2022-11-17 10:17:25 DATA: HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"application/octet-stream","size":9470,"url":"/demo/typescript/index.js.map","remote":"::1"}
+2022-11-17 10:17:25 DATA: HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"application/octet-stream","size":3385692,"url":"/dist/human.esm.js.map","remote":"::1"}
+2022-11-17 10:17:25 DATA: HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"application/manifest+json","size":304,"url":"/demo/manifest.webmanifest","remote":"::1"}
+2022-11-17 10:17:25 DATA: HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"image/png","size":142790,"url":"/assets/icon.png","remote":"::1"}
|
@ -189,7 +189,7 @@ async function verifyDetails(human) {
|
||||||
verify(res.face.length === 1, 'details face length', res.face.length);
|
verify(res.face.length === 1, 'details face length', res.face.length);
|
||||||
for (const face of res.face) {
|
for (const face of res.face) {
|
||||||
verify(face.score > 0.9 && face.boxScore > 0.9 && face.faceScore > 0.9, 'details face score', face.score, face.boxScore, face.faceScore);
|
verify(face.score > 0.9 && face.boxScore > 0.9 && face.faceScore > 0.9, 'details face score', face.score, face.boxScore, face.faceScore);
|
||||||
verify(face.age > 23 && face.age < 30 && face.gender === 'female' && face.genderScore > 0.9 && face.iris > 0.5 && face.iris < 2.5, 'details face age/gender', face.age, face.gender, face.genderScore, face.iris);
|
verify(face.age > 23 && face.age < 30 && face.gender === 'female' && face.genderScore > 0.9 && face.iris > 0.5 && face.distance < 2.5, 'details face age/gender', face.age, face.gender, face.genderScore, face.distance);
|
||||||
verify(face.box.length === 4 && face.boxRaw.length === 4 && face.mesh.length === 478 && face.meshRaw.length === 478 && face.embedding.length === 1024, 'details face arrays', face.box.length, face.mesh.length, face.embedding.length);
|
verify(face.box.length === 4 && face.boxRaw.length === 4 && face.mesh.length === 478 && face.meshRaw.length === 478 && face.embedding.length === 1024, 'details face arrays', face.box.length, face.mesh.length, face.embedding.length);
|
||||||
verify(face.emotion.length >= 2 && face.emotion[0].score > 0.30 && face.emotion[0].emotion === 'angry', 'details face emotion', face.emotion.length, face.emotion[0]);
|
verify(face.emotion.length >= 2 && face.emotion[0].score > 0.30 && face.emotion[0].emotion === 'angry', 'details face emotion', face.emotion.length, face.emotion[0]);
|
||||||
verify(face.real > 0.55, 'details face anti-spoofing', face.real);
|
verify(face.real > 0.55, 'details face anti-spoofing', face.real);
|
||||||
|
|