diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3c523006..3ec4c598 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -9,13 +9,14 @@ Repository: ****
 
 ## Changelog
 
+### **HEAD -> main** 2021/05/23 mandic00@live.com
+
+
 ### **1.9.3** 2021/05/23 mandic00@live.com
 
 - use green weighted for input diff calculation
 - implement experimental drawoptions.bufferedoutput and bufferedfactor
-
-### **origin/main** 2021/05/22 mandic00@live.com
-
+- use explicit tensor interface
 - add tfjs types and remove all instances of any
 - enhance strong typing
 - rebuild all for release
diff --git a/demo/index.js b/demo/index.js
index 979c9bd8..e81e81be 100644
--- a/demo/index.js
+++ b/demo/index.js
@@ -18,18 +18,18 @@ const userConfig = {
     enabled: false,
     flip: false,
   },
-  face: { enabled: false,
-    detector: { return: false },
+  face: { enabled: true,
+    detector: { return: true },
     mesh: { enabled: true },
     iris: { enabled: true },
     description: { enabled: false },
     emotion: { enabled: false },
   },
-  hand: { enabled: true },
-  gesture: { enabled: true },
-  body: { enabled: true, modelPath: 'posenet.json' },
+  hand: { enabled: false },
+  body: { enabled: false, modelPath: 'posenet.json' },
   // body: { enabled: true, modelPath: 'blazepose.json' },
   object: { enabled: false },
+  gesture: { enabled: true },
 
 */
diff --git a/src/face.ts b/src/face.ts
index dd4901c2..e2cdc7cd 100644
--- a/src/face.ts
+++ b/src/face.ts
@@ -161,8 +161,8 @@ export const detectFace = async (parent, input): Promise<any> => {
         delete faces[i].annotations.rightEyeIris;
       }
       const irisSize = (faces[i].annotations?.leftEyeIris && faces[i].annotations?.rightEyeIris)
-        /* average human iris size is 11.7mm */
-        ? 11.7 * Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1]))
+        /* note: average human iris size is 11.7mm */
+        ? Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1])) / input.shape[2]
         : 0;
 
       // combine results
@@ -174,7 +174,7 @@ export const detectFace = async (parent, input): Promise<any> => {
         genderConfidence: descRes.genderConfidence,
         embedding: descRes.descriptor,
         emotion: emotionRes,
-        iris: (irisSize !== 0) ? Math.trunc(irisSize) / 100 : 0,
+        iris: irisSize !== 0 ? Math.trunc(500 / irisSize / 11.7) / 100 : 0,
         rotation,
         tensor: parent.config.face.detector.return ? faces[i].image?.squeeze() : null,
       });
diff --git a/src/object/centernet.ts b/src/object/centernet.ts
index db1283db..1dab6d8d 100644
--- a/src/object/centernet.ts
+++ b/src/object/centernet.ts
@@ -20,7 +20,7 @@ export async function load(config) {
 }
 
 async function process(res, inputSize, outputShape, config) {
-  const results: Array<{ score: number, class: number, label: string, box: number[], boxRaw: number[] }> = [];
+  const results: Array<{ id: number, score: number, class: number, label: string, box: number[], boxRaw: number[] }> = [];
   const detections = res.arraySync();
   const squeezeT = tf.squeeze(res);
   res.dispose();
@@ -38,6 +38,7 @@ async function process(res, inputSize, outputShape, config) {
   classesT.dispose();
   const nms = nmsT.dataSync();
   nmsT.dispose();
+  let i = 0;
   for (const id of nms) {
     const score = detections[0][id][4];
     const classVal = detections[0][id][5];
@@ -54,7 +55,7 @@ async function process(res, inputSize, outputShape, config) {
       Math.trunc(boxRaw[2] * outputShape[0]),
       Math.trunc(boxRaw[3] * outputShape[1]),
     ];
-    results.push({ score, class: classVal, label, box, boxRaw });
+    results.push({ id: i++, score, class: classVal, label, box, boxRaw });
   }
   return results;
 }
diff --git a/src/object/nanodet.ts b/src/object/nanodet.ts
index b4c5c372..caafe0fd 100644
--- a/src/object/nanodet.ts
+++ b/src/object/nanodet.ts
@@ -23,7 +23,7 @@ export async function load(config) {
 
 async function process(res, inputSize, outputShape, config) {
   let id = 0;
-  let results: Array<{ score: number, strideSize: number, class: number, label: string, center: number[], centerRaw: number[], box: number[], boxRaw: number[] }> = [];
+  let results: Array<{ id: number, score: number, strideSize: number, class: number, label: string, center: number[], centerRaw: number[], box: number[], boxRaw: number[] }> = [];
   for (const strideSize of [1, 2, 4]) { // try each stride size as it detects large/medium/small objects
     // find scores, boxes, classes
     tf.tidy(() => { // wrap in tidy to automatically deallocate temp tensors
diff --git a/src/result.ts b/src/result.ts
index 2c222391..ea4a88d8 100644
--- a/src/result.ts
+++ b/src/result.ts
@@ -1,8 +1,4 @@
-/**
- * Result interface definition for **Human** library
- *
- * Contains all possible detection results
- */
+import { Tensor } from '../dist/tfjs.esm.js';
 
 /** Face results
  * Combined results of face detector, face mesh, age, gender, emotion, embedding, iris models
@@ -30,8 +26,6 @@
  * - matrix: 3d transofrmation matrix as array of numeric values
  * - tensor: face tensor as Tensor object which contains detected face
  */
-import { Tensor } from '../dist/tfjs.esm.js';
-
 export interface Face {
   id: number
   confidence: number,
@@ -69,7 +63,6 @@ export interface Face {
  * - score: body part score value
  * - presence: body part presence value
  */
-
 export interface Body {
   id: number,
   score: number,
@@ -115,6 +108,7 @@ export interface Hand {
  * - boxRaw as array of [x, y, width, height], normalized to range 0..1
  */
 export interface Item {
+  id: number,
   score: number,
   strideSize?: number,
   class: number,
@@ -138,6 +132,11 @@ export type Gesture =
   | { 'body': number, gesture: string }
   | { 'hand': number, gesture: string }
 
+/**
+ * Result interface definition for **Human** library
+ *
+ * Contains all possible detection results
+ */
 export interface Result {
   /** {@link Face}: detection & analysis results */
   face: Array<Face>,