mirror of https://github.com/vladmandic/human
update build with automatic linter
parent 73edfb9f44
commit 3cdbcbb860
@@ -11,11 +11,11 @@ Repository: **<git+https://github.com/vladmandic/human.git>**

### **HEAD -> main** 2021/06/02 mandic00@live.com

- switch worker from module to iife importscripts
- release candidate
- added samples to git
- implemented drag & drop for image processing

### **origin/main** 2021/06/01 mandic00@live.com

- release candidate
- breaking changes to results.face output properties
- breaking changes to results.object output properties
- breaking changes to results.hand output properties
@@ -25,7 +25,8 @@
"dev": "node --trace-warnings --unhandled-rejections=strict --trace-uncaught server/serve.js",
"build": "rimraf dist/* typedoc/* types/* && node --trace-warnings --unhandled-rejections=strict --trace-uncaught server/build.js",
"lint": "eslint src server demo test",
- "test": "node --trace-warnings --unhandled-rejections=strict --trace-uncaught test/test.js"
+ "test": "node --trace-warnings --unhandled-rejections=strict --trace-uncaught test/test.js",
+ "scan": "npx auditjs@latest ossi --dev --quiet"
},
"keywords": [
"human",
@@ -62,7 +63,7 @@
"@tensorflow/tfjs-layers": "^3.6.0",
"@tensorflow/tfjs-node": "^3.6.1",
"@tensorflow/tfjs-node-gpu": "^3.6.1",
- "@types/node": "^15.6.1",
+ "@types/node": "^15.9.0",
"@typescript-eslint/eslint-plugin": "^4.26.0",
"@typescript-eslint/parser": "^4.26.0",
"@vladmandic/pilogger": "^0.2.17",
@@ -37,6 +37,12 @@ export interface Config {
*/
cacheSensitivity: number;
+
+ /** Cache sensitivity
+ * - values 0..1 where 0.01 means reset cache if input changed more than 1%
+ * - set to 0 to disable caching
+ */
+ skipFrame: boolean;

/** Run input through image filters before inference
* - image filters run with near-zero latency as they are executed on the GPU
*/
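The new `cacheSensitivity` doc block and the internal `skipFrame` flag work together: `cacheSensitivity` controls when Human decides the input has changed enough to invalidate cached results, and `skipFrame` is the per-frame verdict derived from it. A minimal usage sketch, not part of this commit; the package import and constructor shape are assumptions based on how the library is normally consumed:

```ts
// Hypothetical usage sketch: tune result caching through cacheSensitivity.
// skipFrame itself is internal & dynamic - Human sets it each frame after
// comparing the current input against the previous one.
import Human from '@vladmandic/human';

const human = new Human({
  cacheSensitivity: 0.01, // reset cached results if the input changed by more than 1%
  // cacheSensitivity: 0, // would disable caching entirely
});
```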
@@ -147,6 +153,7 @@ export interface Config {
modelPath: string,
maxDetected: number,
minConfidence: number,
skipFrames: number,
},

/** Controlls and configures all hand detection specific options
@@ -205,6 +212,7 @@ const config: Config = {
cacheSensitivity: 0.75, // cache sensitivity
// values 0..1 where 0.01 means reset cache if input changed more than 1%
// set to 0 to disable caching
skipFrame: false, // internal & dynamic
filter: { // run input through image filters before inference
// image filters run with near-zero latency as they are executed on the GPU
enabled: true, // enable image pre-processing filters
@@ -294,13 +302,15 @@ const config: Config = {
// should be set to the minimum number for performance
// only valid for posenet as other models detects single pose
minConfidence: 0.2, // threshold for discarding a prediction
},
skipFrames: 16, // how many max frames to go without re-running the detector
// only used when cacheSensitivity is not zero
},

hand: {
enabled: true,
rotation: true, // use best-guess rotated hand image or just box with rotation as-is
// false means higher performance, but incorrect finger mapping if hand is inverted
- skipFrames: 18, // how many max frames to go without re-running the hand bounding box detector
+ skipFrames: 19, // how many max frames to go without re-running the hand bounding box detector
// only used when cacheSensitivity is not zero
// e.g., if model is running st 25 FPS, we can re-use existing bounding
// box for updated hand skeleton analysis as the hand probably
@@ -325,7 +335,7 @@ const config: Config = {
minConfidence: 0.2, // threshold for discarding a prediction
iouThreshold: 0.4, // ammount of overlap between two detected objects before one object is removed
maxDetected: 10, // maximum number of objects detected in the input
- skipFrames: 19, // how many max frames to go without re-running the detector
+ skipFrames: 20, // how many max frames to go without re-running the detector
// only used when cacheSensitivity is not zero
},
};
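The `skipFrames` values above only matter while caching is active (`cacheSensitivity` non-zero). The source hunks further down show the pattern every detector uses; a simplified, self-contained sketch of that contract (names are illustrative, not the actual implementation):

```ts
// Simplified frame-skipping sketch: reuse the previous result for up to
// `skipFrames` frames, but only while Human judges the input unchanged.
let skipped = Number.MAX_SAFE_INTEGER; // forces a real run on the first frame
let last: unknown[] = [];

function cachedOrRun(skipFrames: number, skipFrame: boolean, run: () => unknown[]): unknown[] {
  if ((skipped < skipFrames) && skipFrame && (last.length > 0)) {
    skipped++;
    return last; // reuse the cached detection
  }
  skipped = 0;
  last = run(); // re-run the detector and reset the counter
  return last;
}
```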
@@ -5,7 +5,8 @@
import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js';
import { Body } from '../result';
- import { GraphModel } from '../tfjs/types';
+ import { GraphModel, Tensor } from '../tfjs/types';
+ import { Config } from '../config';

let model: GraphModel;
@@ -19,7 +20,7 @@ let skipped = Number.MAX_SAFE_INTEGER;

const bodyParts = ['head', 'neck', 'rightShoulder', 'rightElbow', 'rightWrist', 'chest', 'leftShoulder', 'leftElbow', 'leftWrist', 'pelvis', 'rightHip', 'rightKnee', 'rightAnkle', 'leftHip', 'leftKnee', 'leftAnkle'];

- export async function load(config) {
+ export async function load(config: Config): Promise<GraphModel> {
if (!model) {
// @ts-ignore type mismatch on GraphModel
model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath));
@@ -50,7 +51,7 @@ function max2d(inputs, minScore) {
});
}

- export async function predict(image, config): Promise<Body[]> {
+ export async function predict(image: Tensor, config: Config): Promise<Body[]> {
if ((skipped < config.body.skipFrames) && config.skipFrame && Object.keys(keypoints).length > 0) {
skipped++;
return [{ id: 0, score, box, boxRaw, keypoints }];
@@ -3,6 +3,8 @@
*/

import { log, join } from '../helpers';
+ import { Config } from '../config';
+ import { Tensor, GraphModel } from '../tfjs/types';
import * as tf from '../../dist/tfjs.esm.js';

const annotations = ['angry', 'disgust', 'fear', 'happy', 'sad', 'surprise', 'neutral'];
@@ -15,7 +17,7 @@ let skipped = Number.MAX_SAFE_INTEGER;
// tuning values
const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale

- export async function load(config) {
+ export async function load(config: Config): Promise<GraphModel> {
if (!model) {
model = await tf.loadGraphModel(join(config.modelBasePath, config.face.emotion.modelPath));
if (!model || !model.modelUrl) log('load model failed:', config.face.emotion.modelPath);
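As an aside, the `rgb` tuning constants above are the usual luma weights for collapsing RGB to grayscale before the emotion model runs; a tiny illustrative sketch (not part of this commit):

```ts
// Illustrative only: per-pixel grayscale conversion using the rgb luma factors.
const rgb = [0.2989, 0.5870, 0.1140];
const toGray = (r: number, g: number, b: number): number => rgb[0] * r + rgb[1] * g + rgb[2] * b;

toGray(255, 0, 0); // ~76 for a pure red pixel
```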
@@ -24,7 +26,7 @@ export async function load(config) {
return model;
}

- export async function predict(image, config, idx, count) {
+ export async function predict(image: Tensor, config: Config, idx, count) {
if (!model) return null;
if ((skipped < config.face.emotion.skipFrames) && config.skipFrame && (lastCount === count) && last[idx] && (last[idx].length > 0)) {
skipped++;
@@ -171,11 +171,11 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
// run emotion, inherits face from blazeface
parent.analyze('Start Emotion:');
if (parent.config.async) {
- emotionRes = parent.config.face.emotion.enabled ? emotion.predict(faces[i].image, parent.config, i, faces.length) : {};
+ emotionRes = parent.config.face.emotion.enabled ? emotion.predict(faces[i].image || tf.tensor([]), parent.config, i, faces.length) : {};
} else {
parent.state = 'run:emotion';
timeStamp = now();
- emotionRes = parent.config.face.emotion.enabled ? await emotion.predict(faces[i].image, parent.config, i, faces.length) : {};
+ emotionRes = parent.config.face.emotion.enabled ? await emotion.predict(faces[i].image || tf.tensor([]), parent.config, i, faces.length) : {};
parent.performance.emotion = Math.trunc(now() - timeStamp);
}
parent.analyze('End Emotion:');
@@ -183,11 +183,11 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
// run emotion, inherits face from blazeface
parent.analyze('Start Description:');
if (parent.config.async) {
- descRes = parent.config.face.description.enabled ? faceres.predict(faces[i], parent.config, i, faces.length) : [];
+ descRes = parent.config.face.description.enabled ? faceres.predict(faces[i].image || tf.tensor([]), parent.config, i, faces.length) : [];
} else {
parent.state = 'run:description';
timeStamp = now();
- descRes = parent.config.face.description.enabled ? await faceres.predict(faces[i].image, parent.config, i, faces.length) : [];
+ descRes = parent.config.face.description.enabled ? await faceres.predict(faces[i].image || tf.tensor([]), parent.config, i, faces.length) : [];
parent.performance.embedding = Math.trunc(now() - timeStamp);
}
parent.analyze('End Description:');
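Both hunks apply the same guard: `Face.image` is optional (see the `result.ts` change further down), so an empty tensor is substituted when no face crop is available instead of passing `undefined` into `predict()`. The pattern in isolation, as a sketch; the tfjs import path is an assumption, since the library bundles its own `tfjs.esm.js`:

```ts
import * as tf from '@tensorflow/tfjs';
import type { Tensor } from '@tensorflow/tfjs';

// The guard added in both branches: fall back to an empty tensor when the
// optional face crop is missing.
function faceInput(image?: Tensor): Tensor {
  return image || tf.tensor([]);
}
```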
@@ -7,6 +7,7 @@
import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js';
import { Tensor, GraphModel } from '../tfjs/types';
+ import { Config } from '../config';

let model: GraphModel;
const last: Array<{
@@ -21,7 +22,7 @@ let skipped = Number.MAX_SAFE_INTEGER;

type DB = Array<{ name: string, source: string, embedding: number[] }>;

- export async function load(config) {
+ export async function load(config: Config): Promise<GraphModel> {
const modelUrl = join(config.modelBasePath, config.face.description.modelPath);
if (!model) {
// @ts-ignore type mismatch for GraphModel
@@ -32,7 +33,7 @@ export async function load(config) {
return model;
}

- export function similarity(embedding1, embedding2, order = 2): number {
+ export function similarity(embedding1: Array<number>, embedding2: Array<number>, order = 2): number {
if (!embedding1 || !embedding2) return 0;
if (embedding1?.length === 0 || embedding2?.length === 0) return 0;
if (embedding1?.length !== embedding2?.length) return 0;
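A usage sketch for the newly typed `similarity()`; the import path is illustrative, and the descriptor values are made up (real descriptors come from the face description model and are much longer). The guards above mean empty or mismatched inputs simply score 0; the optional `order` argument selects the exponent applied to the per-element differences (2 by default):

```ts
import { similarity } from './faceres'; // path is illustrative

const desc1: Array<number> = [0.11, 0.52, 0.98];
const desc2: Array<number> = [0.10, 0.50, 0.99];

const score = similarity(desc1, desc2);      // 0 when inputs are empty or lengths differ
const scoreL1 = similarity(desc1, desc2, 1); // alternative norm order
```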
@@ -110,7 +111,7 @@ export function enhance(input): Tensor {
return image;
}

- export async function predict(image, config, idx, count) {
+ export async function predict(image: Tensor, config: Config, idx, count) {
if (!model) return null;
if ((skipped < config.face.description.skipFrames) && config.skipFrame && (lastCount === count) && last[idx]?.age && (last[idx]?.age > 0)) {
skipped++;
@@ -7,7 +7,8 @@ import * as tf from '../../dist/tfjs.esm.js';
import * as handdetector from './handdetector';
import * as handpipeline from './handpipeline';
import { Hand } from '../result';
- import { GraphModel } from '../tfjs/types';
+ import { Tensor, GraphModel } from '../tfjs/types';
+ import { Config } from '../config';

const meshAnnotations = {
thumb: [1, 2, 3, 4],
@@ -22,7 +23,7 @@ let handDetectorModel: GraphModel | null;
let handPoseModel: GraphModel | null;
let handPipeline: handpipeline.HandPipeline;

- export async function predict(input, config): Promise<Hand[]> {
+ export async function predict(input: Tensor, config: Config): Promise<Hand[]> {
const predictions = await handPipeline.estimateHands(input, config);
if (!predictions) return [];
const hands: Array<Hand> = [];
@@ -48,19 +49,19 @@ export async function predict(input, config): Promise<Hand[]> {
}
box[2] -= box[0];
box[3] -= box[1];
- boxRaw = [box[0] / input.shape[2], box[1] / input.shape[1], box[2] / input.shape[2], box[3] / input.shape[1]];
+ boxRaw = [box[0] / (input.shape[2] || 0), box[1] / (input.shape[1] || 0), box[2] / (input.shape[2] || 0), box[3] / (input.shape[1] || 0)];
} else { // otherwise use box from prediction
box = predictions[i].box ? [
Math.trunc(Math.max(0, predictions[i].box.topLeft[0])),
Math.trunc(Math.max(0, predictions[i].box.topLeft[1])),
- Math.trunc(Math.min(input.shape[2], predictions[i].box.bottomRight[0]) - Math.max(0, predictions[i].box.topLeft[0])),
- Math.trunc(Math.min(input.shape[1], predictions[i].box.bottomRight[1]) - Math.max(0, predictions[i].box.topLeft[1])),
+ Math.trunc(Math.min((input.shape[2] || 0), predictions[i].box.bottomRight[0]) - Math.max(0, predictions[i].box.topLeft[0])),
+ Math.trunc(Math.min((input.shape[1] || 0), predictions[i].box.bottomRight[1]) - Math.max(0, predictions[i].box.topLeft[1])),
] : [0, 0, 0, 0];
boxRaw = [
- (predictions[i].box.topLeft[0]) / input.shape[2],
- (predictions[i].box.topLeft[1]) / input.shape[1],
- (predictions[i].box.bottomRight[0] - predictions[i].box.topLeft[0]) / input.shape[2],
- (predictions[i].box.bottomRight[1] - predictions[i].box.topLeft[1]) / input.shape[1],
+ (predictions[i].box.topLeft[0]) / (input.shape[2] || 0),
+ (predictions[i].box.topLeft[1]) / (input.shape[1] || 0),
+ (predictions[i].box.bottomRight[0] - predictions[i].box.topLeft[0]) / (input.shape[2] || 0),
+ (predictions[i].box.bottomRight[1] - predictions[i].box.topLeft[1]) / (input.shape[1] || 0),
];
}
hands.push({ id: i, score: Math.round(100 * predictions[i].confidence) / 100, box, boxRaw, keypoints, annotations });
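All of these edits address the same compiler complaint: tensor shape entries are typed as possibly undefined, so every use of `input.shape[1]` / `input.shape[2]` (height / width) now falls back to `0`. A self-contained sketch of the normalization being guarded; the helper name is made up:

```ts
// Hypothetical helper mirroring the boxRaw computation above: express a
// pixel-space box [x, y, width, height] as fractions of the input dimensions.
// The `|| 0` fallbacks mirror the commit's guards for possibly-undefined dims.
function normalizeBox(
  box: [number, number, number, number],
  shape: Array<number | undefined>, // tensor shape, e.g. [batch, height, width, channels]
): [number, number, number, number] {
  const width = shape[2] || 0;
  const height = shape[1] || 0;
  return [box[0] / width, box[1] / height, box[2] / width, box[3] / height];
}
```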
@@ -68,7 +69,7 @@ export async function predict(input, config): Promise<Hand[]> {
return hands;
}

- export async function load(config): Promise<[unknown, unknown]> {
+ export async function load(config: Config): Promise<[unknown, unknown]> {
if (!handDetectorModel || !handPoseModel) {
// @ts-ignore type mismatch on GraphModel
[handDetectorModel, handPoseModel] = await Promise.all([
@@ -12,7 +12,7 @@ export function join(folder: string, file: string): string {
}

// helper function: wrapper around console output
- export function log(...msg) {
+ export function log(...msg): void {
const dt = new Date();
const ts = `${dt.getHours().toString().padStart(2, '0')}:${dt.getMinutes().toString().padStart(2, '0')}:${dt.getSeconds().toString().padStart(2, '0')}.${dt.getMilliseconds().toString().padStart(3, '0')}`;
// eslint-disable-next-line no-console
src/human.ts
@@ -132,7 +132,7 @@ export class Human {
/** Platform and agent information detected by Human */
sysinfo: { platform: string, agent: string };
/** Performance object that contains values for all recently performed operations */
- performance: Record<string, unknown>; // perf members are dynamically defined as needed
+ performance: Record<string, number>; // perf members are dynamically defined as needed
#numTensors: number;
#analyzeMemoryLeaks: boolean;
#checkSanity: boolean;
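With `performance` narrowed from `Record<string, unknown>` to `Record<string, number>`, callers can aggregate timings without casts. A sketch of assumed usage; the `detect()` call and input type are placeholders, not part of this hunk:

```ts
import Human from '@vladmandic/human';

// Assumed usage sketch: sum per-stage timings after a detection run.
async function report(input: HTMLVideoElement): Promise<void> {
  const human = new Human();
  await human.detect(input);
  const totalMs = Object.values(human.performance).reduce((a, b) => a + b, 0);
  console.log('per-stage timings (ms):', human.performance, 'total:', totalMs);
}
```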
@@ -258,7 +258,7 @@ export class Human {
async load(userConfig?: Config | Record<string, unknown>) {
this.state = 'load';
const timeStamp = now();
- if (userConfig) this.config = mergeDeep(this.config, userConfig);
+ if (userConfig) this.config = mergeDeep(this.config, userConfig) as Config;

if (this.#firstRun) { // print version info on first run and check for correct backend setup
if (this.config.debug) log(`version: ${this.version}`);
@@ -432,7 +432,7 @@ export class Human {
let timeStamp;

// update configuration
- this.config = mergeDeep(this.config, userConfig);
+ this.config = mergeDeep(this.config, userConfig) as Config;

// sanity checks
this.state = 'check';
@@ -478,12 +478,10 @@ export class Human {
this.analyze('Get Image:');

timeStamp = now();
- // @ts-ignore hidden dynamic property that is not part of definitions
this.config.skipFrame = await this.#skipFrame(process.tensor);
if (!this.performance.frames) this.performance.frames = 0;
if (!this.performance.cached) this.performance.cached = 0;
(this.performance.frames as number)++;
- // @ts-ignore hidden dynamic property that is not part of definitions
if (this.config.skipFrame) this.performance.cached++;
this.performance.changed = Math.trunc(now() - timeStamp);
this.analyze('Check Changed:');
@@ -678,7 +676,7 @@ export class Human {
*/
async warmup(userConfig?: Config | Record<string, unknown>): Promise<Result | { error }> {
const t0 = now();
- if (userConfig) this.config = mergeDeep(this.config, userConfig);
+ if (userConfig) this.config = mergeDeep(this.config, userConfig) as Config;
if (!this.config.warmup || this.config.warmup === 'none') return { error: 'null' };
let res;
if (typeof createImageBitmap === 'function') res = await this.#warmupBitmap();
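The `warmup()` behaviour is unchanged; only the merged config is now cast to `Config`. For reference, a hedged usage sketch; the `'face'` mode is an assumption based on the library's documented warmup options, and `'none'` skips warmup as the guard above shows:

```ts
import Human from '@vladmandic/human';

const human = new Human();
// Runs one inference pass on a built-in sample so later detections are not cold.
// Returns { error: 'null' } when config.warmup is unset or set to 'none'.
await human.warmup({ warmup: 'face' });
```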
@@ -5,7 +5,8 @@
import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js';
import { Body } from '../result';
- import { GraphModel } from '../tfjs/types';
+ import { GraphModel, Tensor } from '../tfjs/types';
+ import { Config } from '../config';

let model: GraphModel;
@@ -19,7 +20,7 @@ let skipped = Number.MAX_SAFE_INTEGER;

const bodyParts = ['nose', 'leftEye', 'rightEye', 'leftEar', 'rightEar', 'leftShoulder', 'rightShoulder', 'leftElbow', 'rightElbow', 'leftWrist', 'rightWrist', 'leftHip', 'rightHip', 'leftKnee', 'rightKnee', 'leftAnkle', 'rightAnkle'];

- export async function load(config) {
+ export async function load(config: Config): Promise<GraphModel> {
if (!model) {
// @ts-ignore type mismatch on GraphModel
model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath));
@@ -29,7 +30,7 @@ export async function load(config) {
return model;
}

- export async function predict(image, config): Promise<Body[]> {
+ export async function predict(image: Tensor, config: Config): Promise<Body[]> {
if ((skipped < config.body.skipFrames) && config.skipFrame && Object.keys(keypoints).length > 0) {
skipped++;
return [{ id: 0, score, box, boxRaw, keypoints }];
@@ -63,8 +64,8 @@ export async function predict(image, config): Promise<Body[]> {
kpt[id][0],
],
position: [ // normalized to input image size
- Math.round(image.shape[2] * kpt[id][1]),
- Math.round(image.shape[1] * kpt[id][0]),
+ Math.round((image.shape[2] || 0) * kpt[id][1]),
+ Math.round((image.shape[1] || 0) * kpt[id][0]),
],
});
}
@@ -6,12 +6,14 @@ import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js';
import { labels } from './labels';
import { Item } from '../result';
+ import { GraphModel, Tensor } from '../tfjs/types';
+ import { Config } from '../config';

let model;
let last: Item[] = [];
let skipped = Number.MAX_SAFE_INTEGER;

- export async function load(config) {
+ export async function load(config: Config): Promise<GraphModel> {
if (!model) {
model = await tf.loadGraphModel(join(config.modelBasePath, config.object.modelPath));
const inputs = Object.values(model.modelSignature['inputs']);
@@ -23,7 +25,7 @@ export async function load(config) {
return model;
}

- async function process(res, inputSize, outputShape, config) {
+ async function process(res: Tensor, inputSize, outputShape, config: Config) {
if (!res) return [];
const results: Array<Item> = [];
const detections = res.arraySync();
@@ -64,7 +66,7 @@ async function process(res, inputSize, outputShape, config) {
return results;
}

- export async function predict(input, config): Promise<Item[]> {
+ export async function predict(input: Tensor, config: Config): Promise<Item[]> {
if ((skipped < config.object.skipFrames) && config.skipFrame && (last.length > 0)) {
skipped++;
return last;
@@ -6,6 +6,8 @@ import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js';
import { labels } from './labels';
import { Item } from '../result';
+ import { GraphModel, Tensor } from '../tfjs/types';
+ import { Config } from '../config';

let model;
let last: Array<Item> = [];
@@ -13,7 +15,7 @@ let skipped = Number.MAX_SAFE_INTEGER;

const scaleBox = 2.5; // increase box size

- export async function load(config) {
+ export async function load(config: Config): Promise<GraphModel> {
if (!model) {
model = await tf.loadGraphModel(join(config.modelBasePath, config.object.modelPath));
const inputs = Object.values(model.modelSignature['inputs']);
@@ -100,7 +102,7 @@ async function process(res, inputSize, outputShape, config) {
return results;
}

- export async function predict(image, config): Promise<Item[]> {
+ export async function predict(image: Tensor, config: Config): Promise<Item[]> {
if ((skipped < config.object.skipFrames) && config.skipFrame && (last.length > 0)) {
skipped++;
return last;
@@ -2,7 +2,7 @@
* Type definitions for Human result object
*/

- import { Tensor } from '../dist/tfjs.esm.js';
+ import { Tensor } from './tfjs/types';

/** Face results
* Combined results of face detector, face mesh, age, gender, emotion, embedding, iris models
@@ -52,8 +52,8 @@ export interface Face {
matrix: [number, number, number, number, number, number, number, number, number],
gaze: { bearing: number, strength: number },
}
- image?: typeof Tensor;
- tensor: typeof Tensor,
+ image?: Tensor;
+ tensor: Tensor,
}

/** Body results
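Because `image` and `tensor` are now typed as `Tensor` instances rather than `typeof Tensor`, downstream code can call tensor methods on them directly. A sketch of assumed usage; the `detect()` call and input type are placeholders:

```ts
import Human from '@vladmandic/human';

// Assumed usage sketch: release the per-face tensors once they are no longer needed.
async function processFaces(human: Human, input: HTMLImageElement): Promise<void> {
  const result = await human.detect(input);
  for (const face of result.face) {
    face.tensor.dispose();   // now a Tensor instance, so this type-checks directly
    face.image?.dispose();
  }
}
```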
@@ -35,6 +35,6 @@
"entryPoints": "src/human.ts",
"logLevel": "Info",
"logger": "none",
- "theme": "wiki/theme/"
+ "theme": "wiki/theme/",
}
}
wiki
@@ -1 +1 @@
- Subproject commit 7910d0735849c1fef131ca71969b8a6a080772da
+ Subproject commit 0087af5684c5722b2cf7ffd3db57b8117b7ac8c5