mirror of https://github.com/vladmandic/human

fix for human.draw labels and typedefs

parent: b28327d063
commit: 06f20e86c2
## Changelog

@@ -9,8 +9,9 @@
 ## Changelog

-### **HEAD -> main** 2021/10/19 mandic00@live.com
+### **HEAD -> main** 2021/10/21 mandic00@live.com

+- add human.custom.esm using custom tfjs build

 ### **2.3.5** 2021/10/19 mandic00@live.com

@@ -63,6 +63,7 @@ const drawOptions = {
   drawBoxes: true,
   drawGaze: true,
   drawLabels: true,
+  drawGestures: true,
   drawPolygons: true,
   drawPoints: false,
   fillPolygons: false,

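For reference, the new `drawGestures` flag is used like the existing draw options; a minimal usage sketch (the function and variable names here are illustrative and not part of the diff):

```ts
import { Human } from '@vladmandic/human';

const human = new Human();

async function detectAndDraw(input: HTMLVideoElement, canvas: HTMLCanvasElement) {
  const result = await human.detect(input);   // run detection on the input element
  await human.draw.all(canvas, result, {      // draw all result types onto the output canvas
    drawBoxes: true,
    drawLabels: true,
    drawGestures: true,                       // new flag: set to false to suppress gesture labels
    drawPolygons: true,
    drawPoints: false,
  });
}
```
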
@@ -48,11 +48,11 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
     // run emotion, inherits face from blazeface
     parent.analyze('Start Emotion:');
     if (parent.config.async) {
-      emotionRes = parent.config.face.emotion.enabled ? emotion.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};
+      emotionRes = parent.config.face.emotion.enabled ? emotion.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : null;
     } else {
       parent.state = 'run:emotion';
       timeStamp = now();
-      emotionRes = parent.config.face.emotion.enabled ? await emotion.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};
+      emotionRes = parent.config.face.emotion.enabled ? await emotion.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : null;
       parent.performance.emotion = Math.trunc(now() - timeStamp);
     }
     parent.analyze('End Emotion:');

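The same branch structure repeats for each per-face model below: with `config.async` the prediction is started but not awaited, so the variable briefly holds a pending promise, while the sync path awaits inline so the step can be timed individually. A standalone sketch of that pattern (simplified, with a stand-in predict function; not the actual face module):

```ts
// stand-in for emotion.predict / antispoof.predict / faceres.predict
const fakePredict = async (label: string): Promise<Record<string, number>> => ({ [label]: 1 });

async function runEmotionStep(asyncMode: boolean, enabled: boolean) {
  let emotionRes: Promise<Record<string, number>> | Record<string, number> | null;
  if (asyncMode) {
    // async mode: keep the pending promise; it is resolved later together with the other per-face models
    emotionRes = enabled ? fakePredict('emotion') : null;
  } else {
    // sync mode: await inline so this step can be timed on its own
    emotionRes = enabled ? await fakePredict('emotion') : null;
  }
  return await emotionRes; // resolves to the prediction, or null when the model is disabled
}
```
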
@@ -60,11 +60,11 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
     // run antispoof, inherits face from blazeface
     parent.analyze('Start AntiSpoof:');
     if (parent.config.async) {
-      antispoofRes = parent.config.face.antispoof.enabled ? antispoof.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};
+      antispoofRes = parent.config.face.antispoof.enabled ? antispoof.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : null;
     } else {
       parent.state = 'run:antispoof';
       timeStamp = now();
-      antispoofRes = parent.config.face.antispoof.enabled ? await antispoof.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};
+      antispoofRes = parent.config.face.antispoof.enabled ? await antispoof.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : null;
       parent.performance.antispoof = Math.trunc(now() - timeStamp);
     }
     parent.analyze('End AntiSpoof:');

@@ -86,11 +86,11 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
     // run emotion, inherits face from blazeface
     parent.analyze('Start Description:');
     if (parent.config.async) {
-      descRes = parent.config.face.description.enabled ? faceres.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : [];
+      descRes = parent.config.face.description.enabled ? faceres.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : null;
     } else {
       parent.state = 'run:description';
       timeStamp = now();
-      descRes = parent.config.face.description.enabled ? await faceres.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : [];
+      descRes = parent.config.face.description.enabled ? await faceres.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : null;
       parent.performance.embedding = Math.trunc(now() - timeStamp);
     }
     parent.analyze('End Description:');

@@ -124,10 +124,10 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
       faceRes.push({
         ...faces[i],
         id: i,
-        age: descRes.age,
-        gender: descRes.gender,
-        genderScore: descRes.genderScore,
-        embedding: descRes.descriptor,
+        age: descRes?.age,
+        gender: descRes?.gender,
+        genderScore: descRes?.genderScore,
+        embedding: descRes?.descriptor,
         emotion: emotionRes,
         real: antispoofRes,
         iris: irisSize !== 0 ? Math.trunc(500 / irisSize / 11.7) / 100 : 0,

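Because a disabled description model now yields `null` rather than an empty object, the result assembly switches to optional chaining so the missing fields become `undefined` instead of throwing. A simplified standalone illustration (placeholder types, not the library's actual interfaces):

```ts
// simplified: accessing fields of a possibly-null description result
interface DescResult { age: number, gender: string, genderScore: number, descriptor: number[] }

function assembleFace(descRes: DescResult | null) {
  return {
    age: descRes?.age,                 // undefined instead of a TypeError when descRes is null
    gender: descRes?.gender,
    genderScore: descRes?.genderScore,
    embedding: descRes?.descriptor,
  };
}

console.log(assembleFace(null)); // { age: undefined, gender: undefined, genderScore: undefined, embedding: undefined }
```
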
@@ -135,7 +135,7 @@ export class Human {
   * - canvas: draw processed canvas which is a processed copy of the input
   * - all: meta-function that performs: canvas, face, body, hand
   */
-  draw: { canvas, face, body, hand, gesture, object, person, all, options: DrawOptions };
+  draw: { canvas: typeof draw.canvas, face: typeof draw.face, body: typeof draw.body, hand: typeof draw.hand, gesture: typeof draw.gesture, object: typeof draw.object, person: typeof draw.person, all: typeof draw.all, options: DrawOptions };

   /** Currently loaded models
   * @internal

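With the members typed via `typeof draw.*`, calls through `human.draw` are now checked against the concrete draw function signatures; a hedged usage sketch (assumes a browser context, and names other than the `human` API are illustrative):

```ts
import { Human } from '@vladmandic/human';

const human = new Human();

async function drawFaces(video: HTMLVideoElement, canvas: HTMLCanvasElement) {
  const result = await human.detect(video);
  // these members now resolve to the real draw function signatures rather than untyped values
  await human.draw.face(canvas, result.face);       // draw boxes, labels and mesh for detected faces
  await human.draw.gesture(canvas, result.gesture); // draw detected gesture labels
  console.log(human.draw.options.drawGestures);     // options exposes the DrawOptions defaults
}
```
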
@@ -37,6 +37,7 @@ export interface DrawOptions {
   roundRect: number,
   drawPoints: boolean,
   drawLabels: boolean,
+  drawGestures: boolean,
   drawBoxes: boolean,
   drawPolygons: boolean,
   drawGaze: boolean,

@@ -58,6 +59,7 @@ export const options: DrawOptions = {
   drawPoints: <boolean>false,
   drawLabels: <boolean>true,
   drawBoxes: <boolean>true,
+  drawGestures: <boolean>true,
   drawPolygons: <boolean>true,
   drawGaze: <boolean>true,
   fillPolygons: <boolean>false,

@@ -166,6 +168,7 @@ function arrow(ctx: CanvasRenderingContext2D, from: Point, to: Point, radius = 5
 export async function gesture(inCanvas: HTMLCanvasElement | OffscreenCanvas, result: Array<GestureResult>, drawOptions?: Partial<DrawOptions>) {
   const localOptions = mergeDeep(options, drawOptions);
   if (!result || !inCanvas) return;
+  if (localOptions.drawGestures) {
     const ctx = getCanvasContext(inCanvas);
     ctx.font = localOptions.font;
     ctx.fillStyle = localOptions.color;

@@ -187,6 +190,7 @@ export async function gesture(inCanvas: HTMLCanvasElement | OffscreenCanvas, res
       }
     }
   }
+  }

 export async function face(inCanvas: HTMLCanvasElement | OffscreenCanvas, result: Array<FaceResult>, drawOptions?: Partial<DrawOptions>) {
   const localOptions = mergeDeep(options, drawOptions);

@@ -197,13 +201,14 @@ export async function face(inCanvas: HTMLCanvasElement | OffscreenCanvas, result
     ctx.strokeStyle = localOptions.color;
     ctx.fillStyle = localOptions.color;
     if (localOptions.drawBoxes) rect(ctx, f.box[0], f.box[1], f.box[2], f.box[3], localOptions);
+    if (localOptions.drawLabels) {
     // silly hack since fillText does not suport new line
     const labels:string[] = [];
     labels.push(`face: ${Math.trunc(100 * f.score)}%`);
     if (f.genderScore) labels.push(`${f.gender || ''} ${Math.trunc(100 * f.genderScore)}%`);
     if (f.age) labels.push(`age: ${f.age || ''}`);
     if (f.iris) labels.push(`distance: ${f.iris}`);
-    if (f.real) labels.push(`Real: ${Math.trunc(100 * f.real)}%`);
+    if (f.real) labels.push(`real: ${Math.trunc(100 * f.real)}%`);
     if (f.emotion && f.emotion.length > 0) {
       const emotion = f.emotion.map((a) => `${Math.trunc(100 * a.score)}% ${a.emotion}`);
       if (emotion.length > 3) emotion.length = 3;

@@ -225,6 +230,7 @@ export async function face(inCanvas: HTMLCanvasElement | OffscreenCanvas, result
         ctx.fillStyle = localOptions.labelColor;
         ctx.fillText(labels[i], x + 4, y + 15);
       }
+    }
     ctx.lineWidth = 1;
     if (f.mesh && f.mesh.length > 0) {
       if (localOptions.drawPoints) {