switch to single jumbo dts

Ref: pull/91/head
Author: Vladimir Mandic, 2021-03-17 18:48:02 -04:00
Parent: f11074d9ab
Commit: 8ba3c795cf
52 changed files with 908 additions and 828 deletions

@@ -11,6 +11,7 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
### **HEAD -> main** 2021/03/17 mandic00@live.com
- type definitions
### **1.1.9** 2021/03/17 mandic00@live.com

File diff suppressed because one or more lines are too long

dist/human.d.ts (vendored, new file, 896 lines)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.js.map (vendored, 2 changed lines)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

@@ -6,7 +6,7 @@
"main": "dist/human.node.js",
"module": "dist/human.esm.js",
"browser": "dist/human.esm.js",
"types": "types/human.d.ts",
"types": "dist/human.d.ts",
"author": "Vladimir Mandic <mandic00@live.com>",
"bugs": {
"url": "https://github.com/vladmandic/human/issues"
@@ -23,7 +23,7 @@
"scripts": {
"start": "node --trace-warnings --unhandled-rejections=strict --trace-uncaught --no-deprecation demo/node.js",
"dev": "node --trace-warnings --unhandled-rejections=strict --trace-uncaught server/serve.js",
"build": "rimraf dist/* types/* typedoc/* && node --trace-warnings --unhandled-rejections=strict --trace-uncaught server/build.js",
"build": "rimraf dist/* typedoc/* && node --trace-warnings --unhandled-rejections=strict --trace-uncaught server/build.js",
"lint": "eslint src server demo",
"test": "npm run lint && npm run start"
},
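
Note: with the "types" field now pointing at the single bundled dist/human.d.ts, TypeScript consumers resolve every declaration from one file instead of the per-module tree under types/. A minimal consumer sketch, not part of this diff; the npm package name @vladmandic/human is an assumption:

// consumer.ts: illustrative only
import Human from '@vladmandic/human';          // assumed package name

const human = new Human({ backend: 'webgl' });  // constructor accepts Config | Object

async function analyze(video: HTMLVideoElement) {
  await human.load();                           // pre-load the enabled models
  const result = await human.detect(video);     // typed as Result | Error by dist/human.d.ts
  return result;
}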

@@ -21,7 +21,7 @@ const tsconfig = {
noEmitOnError: false,
target: ts.ScriptTarget.ES2018,
module: ts.ModuleKind.ES2020,
-outDir: 'types/',
+out: 'dist/human.d.ts',
declaration: true,
emitDeclarationOnly: true,
emitDecoratorMetadata: true,
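
The build script drives the TypeScript compiler programmatically; replacing outDir with out (the legacy alias of outFile) makes the declaration emit land in one concatenated dist/human.d.ts instead of a tree under types/. A rough sketch of that kind of declaration-only bundling with the compiler API, mirroring the options shown above; the entry file name is an assumption and the real server/build.js may differ:

import ts from 'typescript';

const tsconfig: ts.CompilerOptions = {
  noEmitOnError: false,
  target: ts.ScriptTarget.ES2018,
  module: ts.ModuleKind.ES2020,
  out: 'dist/human.d.ts',          // single jumbo declaration bundle
  declaration: true,
  emitDeclarationOnly: true,
  emitDecoratorMetadata: true,
};

const program = ts.createProgram(['src/human.ts'], tsconfig);  // entry point assumed
const emit = program.emit();
for (const d of ts.getPreEmitDiagnostics(program).concat(emit.diagnostics)) {
  console.log(ts.flattenDiagnosticMessageText(d.messageText, '\n'));
}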

@@ -5,7 +5,7 @@
"moduleResolution": "node",
"lib": ["es2018", "dom"],
"typeRoots": ["node_modules/@types"],
"outDir": "types",
"out": "dist/human.d.ts",
"declaration": true,
"emitDeclarationOnly": true,
"emitDecoratorMetadata": true,

@@ -66,7 +66,7 @@
<section class="tsd-panel tsd-comment">
<div class="tsd-comment tsd-typography">
<div class="lead">
-<p>Main Class for <strong>Human</strong> library</p>
+<p><strong>Human</strong> library main class</p>
</div>
<p>All methods and properties are available only as members of Human class</p>
<ul>

types/age/age.d.ts (vendored, deleted, 2 lines)

@@ -1,2 +0,0 @@
export declare function load(config: any): Promise<any>;
export declare function predict(image: any, config: any): Promise<unknown>;

@@ -1,19 +0,0 @@
export declare const disposeBox: (box: any) => void;
export declare class BlazeFaceModel {
model: any;
anchorsData: any;
anchors: any;
inputSize: number;
config: any;
constructor(model: any, config: any);
getBoundingBoxes(inputImage: any): Promise<{
boxes: {
box: any;
landmarks: any;
anchor: any;
confidence: number;
}[];
scaleFactor: number[];
} | null>;
}
export declare function load(config: any): Promise<BlazeFaceModel>;

@@ -1,17 +0,0 @@
export declare function scaleBoxCoordinates(box: any, factor: any): {
startPoint: number[];
endPoint: number[];
};
export declare function getBoxSize(box: any): number[];
export declare function getBoxCenter(box: any): any[];
export declare function cutBoxFromImageAndResize(box: any, image: any, cropSize: any): any;
export declare function enlargeBox(box: any, factor?: number): {
startPoint: number[];
endPoint: any[];
landmarks: any;
};
export declare function squarifyBox(box: any): {
startPoint: number[];
endPoint: any[];
landmarks: any;
};

@@ -1,49 +0,0 @@
export declare const MESH_ANNOTATIONS: {
silhouette: number[];
lipsUpperOuter: number[];
lipsLowerOuter: number[];
lipsUpperInner: number[];
lipsLowerInner: number[];
rightEyeUpper0: number[];
rightEyeLower0: number[];
rightEyeUpper1: number[];
rightEyeLower1: number[];
rightEyeUpper2: number[];
rightEyeLower2: number[];
rightEyeLower3: number[];
rightEyebrowUpper: number[];
rightEyebrowLower: number[];
rightEyeIris: number[];
leftEyeUpper0: number[];
leftEyeLower0: number[];
leftEyeUpper1: number[];
leftEyeLower1: number[];
leftEyeUpper2: number[];
leftEyeLower2: number[];
leftEyeLower3: number[];
leftEyebrowUpper: number[];
leftEyebrowLower: number[];
leftEyeIris: number[];
midwayBetweenEyes: number[];
noseTip: number[];
noseBottom: number[];
noseRightCorner: number[];
noseLeftCorner: number[];
rightCheek: number[];
leftCheek: number[];
};
export declare const MESH_TO_IRIS_INDICES_MAP: {
key: string;
indices: number[];
}[];
export declare const UV468: number[][];
export declare const TRI468: number[];
export declare const TRI68: number[];
export declare const TRI33: number[];
export declare const TRI7: number[];
export declare const VTX68: number[];
export declare const VTX33: number[];
export declare const VTX7: number[];
export declare const UV68: number[][];
export declare const UV33: number[][];
export declare const UV7: number[][];

@@ -1,17 +0,0 @@
export declare class MediaPipeFaceMesh {
facePipeline: any;
config: any;
constructor(blazeFace: any, blazeMeshModel: any, irisModel: any, config: any);
estimateFaces(input: any, config: any): Promise<{
confidence: any;
boxConfidence: any;
faceConfidence: any;
box: any;
mesh: any;
boxRaw: any;
meshRaw: any;
annotations: any;
image: any;
}[]>;
}
export declare function load(config: any): Promise<MediaPipeFaceMesh>;

@@ -1,35 +0,0 @@
export declare class Pipeline {
storedBoxes: any;
boundingBoxDetector: any;
meshDetector: any;
irisModel: any;
boxSize: number;
meshSize: number;
irisSize: number;
irisEnlarge: number;
skipped: number;
detectedFaces: number;
constructor(boundingBoxDetector: any, meshDetector: any, irisModel: any);
transformRawCoords(rawCoords: any, box: any, angle: any, rotationMatrix: any): any;
getLeftToRightEyeDepthDifference(rawCoords: any): number;
getEyeBox(rawCoords: any, face: any, eyeInnerCornerIndex: any, eyeOuterCornerIndex: any, flip?: boolean): {
box: {
startPoint: number[];
endPoint: any[];
landmarks: any;
};
boxSize: number[];
crop: any;
};
getEyeCoords(eyeData: any, eyeBox: any, eyeBoxSize: any, flip?: boolean): {
rawCoords: any[][];
iris: any[][];
};
getAdjustedIrisCoords(rawCoords: any, irisCoords: any, direction: any): any;
predict(input: any, config: any): Promise<any>;
calculateLandmarksBoundingBox(landmarks: any): {
startPoint: number[];
endPoint: number[];
landmarks: any;
};
}

@@ -1,21 +0,0 @@
export declare const IDENTITY_MATRIX: number[][];
/**
* Normalizes the provided angle to the range -pi to pi.
* @param angle The angle in radians to be normalized.
*/
export declare function normalizeRadians(angle: any): number;
/**
* Computes the angle of rotation between two anchor points.
* @param point1 First anchor point
* @param point2 Second anchor point
*/
export declare function computeRotation(point1: any, point2: any): number;
export declare function radToDegrees(rad: any): number;
export declare function buildTranslationMatrix(x: any, y: any): any[][];
export declare function dot(v1: any, v2: any): number;
export declare function getColumnFrom2DArr(arr: any, columnIndex: any): number[];
export declare function multiplyTransformMatrices(mat1: any, mat2: any): number[][];
export declare function buildRotationMatrix(rotation: any, center: any): number[][];
export declare function invertTransformMatrix(matrix: any): any[][];
export declare function rotatePoint(homogeneousCoordinate: any, rotationMatrix: any): number[];
export declare function xyDistanceBetweenPoints(a: any, b: any): number;

@@ -1,2 +0,0 @@
export declare const full: string[];
export declare const upper: string[];

@@ -1,14 +0,0 @@
export declare function load(config: any): Promise<any>;
export declare function predict(image: any, config: any): Promise<{
keypoints: {
id: any;
part: any;
position: {
x;
y;
z;
};
score: any;
presence: any;
}[];
}[] | null>;

types/config.d.ts (vendored, deleted, 115 lines)

@@ -1,115 +0,0 @@
/**
* Configuration interface definition for **Human** library
*
* Contains all configurable parameters
*/
export interface Config {
backend: String;
wasmPath: String;
debug: Boolean;
async: Boolean;
profile: Boolean;
deallocate: Boolean;
scoped: Boolean;
videoOptimized: Boolean;
warmup: String;
filter: {
enabled: Boolean;
width: Number;
height: Number;
return: Boolean;
brightness: Number;
contrast: Number;
sharpness: Number;
blur: Number;
saturation: Number;
hue: Number;
negative: Boolean;
sepia: Boolean;
vintage: Boolean;
kodachrome: Boolean;
technicolor: Boolean;
polaroid: Boolean;
pixelate: Number;
};
gesture: {
enabled: Boolean;
};
face: {
enabled: Boolean;
detector: {
modelPath: String;
rotation: Boolean;
maxFaces: Number;
skipFrames: Number;
skipInitial: Boolean;
minConfidence: Number;
iouThreshold: Number;
scoreThreshold: Number;
return: Boolean;
};
mesh: {
enabled: Boolean;
modelPath: String;
};
iris: {
enabled: Boolean;
modelPath: String;
};
age: {
enabled: Boolean;
modelPath: String;
skipFrames: Number;
};
gender: {
enabled: Boolean;
minConfidence: Number;
modelPath: String;
skipFrames: Number;
};
emotion: {
enabled: Boolean;
minConfidence: Number;
skipFrames: Number;
modelPath: String;
};
embedding: {
enabled: Boolean;
modelPath: String;
};
};
body: {
enabled: Boolean;
modelPath: String;
maxDetections: Number;
scoreThreshold: Number;
nmsRadius: Number;
};
hand: {
enabled: Boolean;
rotation: Boolean;
skipFrames: Number;
skipInitial: Boolean;
minConfidence: Number;
iouThreshold: Number;
scoreThreshold: Number;
maxHands: Number;
landmarks: Boolean;
detector: {
modelPath: String;
};
skeleton: {
modelPath: String;
};
};
object: {
enabled: Boolean;
modelPath: String;
minConfidence: Number;
iouThreshold: Number;
maxResults: Number;
skipFrames: Number;
};
}
declare const config: Config;
export { config as defaults };
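
The Config interface above is normally consumed as a deep partial override rather than fully populated, with unspecified values falling back to the exported defaults. An illustrative override object; the field choices are examples, not part of this commit:

const userConfig = {
  backend: 'wasm',
  async: true,
  face: {
    enabled: true,
    detector: { maxFaces: 1, skipFrames: 10 },
    iris: { enabled: false },
  },
  hand: { enabled: false },
  object: { enabled: false },
};

// passed as new Human(userConfig) or human.detect(input, userConfig),
// both of which accept Config | Object per the class declaration later in this diff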

types/draw/draw.d.ts (vendored, deleted, 26 lines)

@@ -1,26 +0,0 @@
export declare const drawOptions: {
color: string;
labelColor: string;
shadowColor: string;
font: string;
lineHeight: number;
lineWidth: number;
pointSize: number;
roundRect: number;
drawPoints: Boolean;
drawLabels: Boolean;
drawBoxes: Boolean;
drawPolygons: Boolean;
fillPolygons: Boolean;
useDepth: Boolean;
useCurves: Boolean;
bufferedOutput: Boolean;
useRawBoxes: Boolean;
};
export declare function gesture(inCanvas: HTMLCanvasElement, result: Array<any>): Promise<void>;
export declare function face(inCanvas: HTMLCanvasElement, result: Array<any>): Promise<void>;
export declare function body(inCanvas: HTMLCanvasElement, result: Array<any>): Promise<void>;
export declare function hand(inCanvas: HTMLCanvasElement, result: Array<any>): Promise<void>;
export declare function object(inCanvas: HTMLCanvasElement, result: Array<any>): Promise<void>;
export declare function canvas(inCanvas: HTMLCanvasElement, outCanvas: HTMLCanvasElement): Promise<void>;
export declare function all(inCanvas: HTMLCanvasElement, result: any): Promise<void>;
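
These drawing helpers remain reachable on a Human instance through its draw member (see the class declaration later in this diff). A short sketch; the package name is an assumption:

import Human from '@vladmandic/human';   // assumed package name

const human = new Human();

async function overlay(input: HTMLVideoElement, canvas: HTMLCanvasElement) {
  const result = await human.detect(input);
  if (human.draw.drawOptions) human.draw.drawOptions.drawLabels = true;  // tune defaults before drawing
  await human.draw.all(canvas, result);  // draw all detected results onto the canvas
}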

@@ -1,12 +0,0 @@
declare type Tensor = {};
export declare function load(config: any): Promise<any>;
export declare function simmilarity(embedding1: any, embedding2: any, order?: number): Number;
export declare function match(embedding: Array<Number>, db: Array<any>, threshold?: number): {
simmilarity: number;
name: string;
source: string;
embedding: never[];
};
export declare function enhance(input: any): Tensor;
export declare function predict(input: any, config: any): Promise<number[]>;
export {};
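
The embedding helpers surface on the Human class as simmilarity() and match() (declared later in this diff; spelling as in the source), with embedding vectors coming from result.face[n].embedding when the embedding model is enabled. A comparison sketch; the package name is an assumption:

import Human from '@vladmandic/human';   // assumed package name

const human = new Human({ face: { embedding: { enabled: true } } });

function compare(a: number[], b: number[], db: Array<{ name: string; source: string | undefined; embedding: number[] }>) {
  const score = human.simmilarity(a, b);   // similarity score between two embeddings
  const best = human.match(a, db, 0.5);    // closest db entry; threshold value is illustrative
  return { score, best };
}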

@@ -1,2 +0,0 @@
export declare function load(config: any): Promise<any>;
export declare function predict(image: any, config: any): Promise<unknown>;

@@ -1,2 +0,0 @@
export declare function load(config: any): Promise<any>;
export declare function predict(image: any, config: any): Promise<unknown>;

@@ -1,16 +0,0 @@
export declare const body: (res: any) => {
body: number;
gesture: string;
}[];
export declare const face: (res: any) => {
face: number;
gesture: string;
}[];
export declare const iris: (res: any) => {
iris: number;
gesture: string;
}[];
export declare const hand: (res: any) => {
hand: number;
gesture: string;
}[];

@@ -1,6 +0,0 @@
export declare const anchors: {
w: number;
h: number;
x_center: number;
y_center: number;
}[];

@@ -1,24 +0,0 @@
export declare function getBoxSize(box: any): number[];
export declare function getBoxCenter(box: any): any[];
export declare function cutBoxFromImageAndResize(box: any, image: any, cropSize: any): any;
export declare function scaleBoxCoordinates(box: any, factor: any): {
startPoint: number[];
endPoint: number[];
palmLandmarks: any;
confidence: any;
};
export declare function enlargeBox(box: any, factor?: number): {
startPoint: number[];
endPoint: any[];
palmLandmarks: any;
};
export declare function squarifyBox(box: any): {
startPoint: number[];
endPoint: any[];
palmLandmarks: any;
};
export declare function shiftBox(box: any, shiftFactor: any): {
startPoint: any[];
endPoint: any[];
palmLandmarks: any;
};

@@ -1,17 +0,0 @@
export declare class HandDetector {
model: any;
anchors: any;
anchorsTensor: any;
inputSize: number;
inputSizeTensor: any;
doubleInputSizeTensor: any;
constructor(model: any, inputSize: any, anchorsAnnotated: any);
normalizeBoxes(boxes: any): any;
normalizeLandmarks(rawPalmLandmarks: any, index: any): any;
getBoxes(input: any, config: any): Promise<{
box: any;
palmLandmarks: any;
confidence: number;
}[]>;
estimateHandBounds(input: any, config: any): Promise<{}[]>;
}

@@ -1,25 +0,0 @@
export declare class HandPipeline {
handDetector: any;
landmarkDetector: any;
inputSize: number;
storedBoxes: any;
skipped: number;
detectedHands: number;
constructor(handDetector: any, landmarkDetector: any, inputSize: any);
getBoxForPalmLandmarks(palmLandmarks: any, rotationMatrix: any): {
startPoint: number[];
endPoint: any[];
palmLandmarks: any;
};
getBoxForHandLandmarks(landmarks: any): {
startPoint: number[];
endPoint: any[];
palmLandmarks: any;
};
transformRawCoords(rawCoords: any, box2: any, angle: any, rotationMatrix: any): any;
estimateHands(image: any, config: any): Promise<{}[]>;
calculateLandmarksBoundingBox(landmarks: any): {
startPoint: number[];
endPoint: number[];
};
}

@@ -1,20 +0,0 @@
export declare class HandPose {
handPipeline: any;
constructor(handPipeline: any);
static getAnnotations(): {
thumb: number[];
indexFinger: number[];
middleFinger: number[];
ringFinger: number[];
pinky: number[];
palmBase: number[];
};
estimateHands(input: any, config: any): Promise<{
confidence: number;
box: any;
boxRaw: any;
landmarks: any;
annotations: any;
}[]>;
}
export declare function load(config: any): Promise<HandPose>;

@@ -1,9 +0,0 @@
export declare function normalizeRadians(angle: any): number;
export declare function computeRotation(point1: any, point2: any): number;
export declare const buildTranslationMatrix: (x: any, y: any) => any[][];
export declare function dot(v1: any, v2: any): number;
export declare function getColumnFrom2DArr(arr: any, columnIndex: any): number[];
export declare function multiplyTransformMatrices(mat1: any, mat2: any): number[][];
export declare function buildRotationMatrix(rotation: any, center: any): number[][];
export declare function invertTransformMatrix(matrix: any): any[][];
export declare function rotatePoint(homogeneousCoordinate: any, rotationMatrix: any): number[];

types/human.d.ts (vendored, deleted, 108 lines)

@@ -1,108 +0,0 @@
import * as tf from '../dist/tfjs.esm.js';
import * as facemesh from './blazeface/facemesh';
import * as age from './age/age';
import * as gender from './gender/gender';
import * as emotion from './emotion/emotion';
import * as posenet from './posenet/posenet';
import * as handpose from './handpose/handpose';
import * as blazepose from './blazepose/blazepose';
import * as nanodet from './nanodet/nanodet';
import * as draw from './draw/draw';
import { Config } from './config';
import { Result } from './result';
declare type Tensor = {};
declare type Model = {};
export type { Config } from './config';
export type { Result } from './result';
/** Defines all possible input types for **Human** detection */
export declare type Input = Tensor | ImageData | ImageBitmap | HTMLVideoElement | HTMLCanvasElement | OffscreenCanvas;
/** Error message */
export declare type Error = {
error: String;
};
export declare type TensorFlow = typeof tf;
/**
* Main Class for **Human** library
*
* All methods and properties are available only as members of Human class
*
* - Configuration object definition: {@link Config}
* - Results object definition: {@link Result}
* - Possible inputs: {@link Input}
*/
export declare class Human {
#private;
version: String;
config: Config;
state: String;
image: {
tensor: Tensor;
canvas: OffscreenCanvas | HTMLCanvasElement;
};
tf: TensorFlow;
draw: {
drawOptions?: typeof draw.drawOptions;
gesture: typeof draw.gesture;
face: typeof draw.face;
body: typeof draw.body;
hand: typeof draw.hand;
canvas: typeof draw.canvas;
all: typeof draw.all;
};
models: {
face: facemesh.MediaPipeFaceMesh | null;
posenet: posenet.PoseNet | null;
blazepose: Model | null;
handpose: handpose.HandPose | null;
iris: Model | null;
age: Model | null;
gender: Model | null;
emotion: Model | null;
embedding: Model | null;
nanodet: Model | null;
};
classes: {
facemesh: typeof facemesh;
age: typeof age;
gender: typeof gender;
emotion: typeof emotion;
body: typeof posenet | typeof blazepose;
hand: typeof handpose;
nanodet: typeof nanodet;
};
sysinfo: {
platform: String;
agent: String;
};
constructor(userConfig?: Config | Object);
profileData(): {
newBytes: any;
newTensors: any;
peakBytes: any;
numKernelOps: any;
timeKernelOps: any;
slowestKernelOps: any;
largestKernelOps: any;
} | {};
simmilarity(embedding1: Array<Number>, embedding2: Array<Number>): Number;
enhance(input: Tensor): Tensor | null;
match(faceEmbedding: Array<Number>, db: Array<{
name: String;
source: String | undefined;
embedding: Array<Number>;
}>, threshold?: number): {
name: String;
source: String | undefined;
simmilarity: Number;
embedding: Array<Number>;
};
load(userConfig?: Config | Object): Promise<void>;
detect(input: Input, userConfig?: Config | Object): Promise<Result | Error>;
warmup(userConfig?: Config | Object): Promise<Result | {
error: any;
}>;
}
/**
* Class Human is also available as default export
*/
export { Human as default };
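
Since detect() is declared to return Result | Error, callers narrow the union before using the result. A small sketch against the declarations above; the package name is an assumption:

import Human, { Result } from '@vladmandic/human';   // assumed package name

const human = new Human();

async function detectFaces(input: HTMLCanvasElement): Promise<Result['face']> {
  const res = await human.detect(input);
  if ('error' in res) throw new Error(String(res.error));   // narrow away the Error branch
  return res.face;                                          // res is a Result here
}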

@@ -1,5 +0,0 @@
import * as tf from '../../dist/tfjs.esm.js';
export declare function process(input: any, config: any): {
tensor: tf.Tensor;
canvas: OffscreenCanvas | HTMLCanvasElement;
};

types/log.d.ts (vendored, deleted, 1 line)

@@ -1 +0,0 @@
export declare function log(...msg: any[]): void;

@@ -1,2 +0,0 @@
export declare function load(config: any): Promise<any>;
export declare function predict(image: any, config: any): Promise<unknown>;

@@ -1,2 +0,0 @@
import * as heapSort from './heapSort';
export declare function buildPartWithScoreQueue(scoreThreshold: any, localMaximumRadius: any, scores: any): heapSort.MaxHeap;

@@ -1,4 +0,0 @@
export declare function decodeMultiplePoses(scoresBuffer: any, offsetsBuffer: any, displacementsFwdBuffer: any, displacementsBwdBuffer: any, nmsRadius: any, maxDetections: any, scoreThreshold: any): {
keypoints: any;
score: number;
}[];

@@ -1,12 +0,0 @@
export declare function decodePose(root: any, scores: any, offsets: any, outputStride: any, displacementsFwd: any, displacementsBwd: any): any[];
export declare function decodeSinglePose(heatmapScores: any, offsets: any, minScore: any): Promise<{
keypoints: {
position: {
y: any;
x: any;
};
part: string;
score: number;
}[];
score: number;
}>;

@@ -1,4 +0,0 @@
export declare function getPointsConfidence(heatmapScores: any, heatMapCoords: any): Float32Array;
export declare function getOffsetVectors(heatMapCoordsBuffer: any, offsetsBuffer: any): any;
export declare function getOffsetPoints(heatMapCoordsBuffer: any, outputStride: any, offsetsBuffer: any): any;
export declare function argmax2d(inputs: any): any;

@@ -1,17 +0,0 @@
export declare class MaxHeap {
priorityQueue: any;
numberOfElements: number;
getElementValue: any;
constructor(maxSize: any, getElementValue: any);
enqueue(x: any): void;
dequeue(): any;
empty(): boolean;
size(): number;
all(): any;
max(): any;
swim(k: any): void;
sink(k: any): void;
getValueAt(i: any): any;
less(i: any, j: any): boolean;
exchange(i: any, j: any): void;
}

@@ -1,6 +0,0 @@
export declare const partNames: string[];
export declare const NUM_KEYPOINTS: any;
export declare const partIds: any;
export declare const connectedPartIndices: any[][];
export declare const poseChain: string[][];
export declare const partChannels: string[];

@@ -1,6 +0,0 @@
export declare class BaseModel {
model: any;
constructor(model: any);
predict(input: any): any;
dispose(): void;
}

@@ -1,8 +0,0 @@
export declare class PoseNet {
baseModel: any;
inputSize: number;
constructor(model: any);
estimatePoses(input: any, config: any): Promise<unknown>;
dispose(): void;
}
export declare function load(config: any): Promise<PoseNet>;

@@ -1,14 +0,0 @@
export declare function eitherPointDoesntMeetConfidence(a: any, b: any, minConfidence: any): boolean;
export declare function getAdjacentKeyPoints(keypoints: any, minConfidence: any): any[];
export declare function getBoundingBox(keypoints: any): any;
export declare function getBoundingBoxPoints(keypoints: any): {
x: any;
y: any;
}[];
export declare function toTensorBuffers3D(tensors: any): Promise<[unknown, unknown, unknown, unknown, unknown, unknown, unknown, unknown, unknown, unknown]>;
export declare function scalePose(pose: any, scaleY: any, scaleX: any): {
score: any;
keypoints: any;
};
export declare function resizeTo(image: any, [targetH, targetW]: [any, any]): any;
export declare function scaleAndFlipPoses(poses: any, [height, width]: [any, any], [inputResolutionHeight, inputResolutionWidth]: [any, any]): any;

@@ -1,19 +0,0 @@
export declare function getOffsetPoint(y: any, x: any, keypoint: any, offsets: any): {
y: any;
x: any;
};
export declare function getImageCoords(part: any, outputStride: any, offsets: any): {
x: any;
y: any;
};
export declare function fillArray(element: any, size: any): any[];
export declare function clamp(a: any, min: any, max: any): any;
export declare function squaredDistance(y1: any, x1: any, y2: any, x2: any): number;
export declare function addVectors(a: any, b: any): {
x: any;
y: any;
};
export declare function clampVector(a: any, min: any, max: any): {
y: any;
x: any;
};

types/profile.d.ts (vendored, deleted, 2 lines)

@@ -1,2 +0,0 @@
export declare const data: {};
export declare function run(name: string, raw: any): void;

types/result.d.ts (vendored, deleted, 132 lines)

@@ -1,132 +0,0 @@
/**
* Result interface definition for **Human** library
*
* Contains all possible detection results
*/
export interface Result {
/** Face results
* Combined results of face detector, face mesh, age, gender, emotion, embedding, iris models
* Some values may be null if specific model is not enabled
*
* Array of individual results with one object per detected face
* Each result has:
 * - overall detection confidence value
* - box detection confidence value
* - mesh detection confidence value
* - box as array of [x, y, width, height], normalized to image resolution
* - boxRaw as array of [x, y, width, height], normalized to range 0..1
* - mesh as array of [x, y, z] points of face mesh, normalized to image resolution
* - meshRaw as array of [x, y, z] points of face mesh, normalized to range 0..1
* - annotations as array of annotated face mesh points
* - age as value
* - gender as value
* - genderConfidence as value
* - emotion as array of possible emotions with their individual scores
* - iris as distance value
* - angle as object with values for roll, yaw and pitch angles
*/
face: Array<{
confidence: Number;
boxConfidence: Number;
faceConfidence: Number;
box: [Number, Number, Number, Number];
boxRaw: [Number, Number, Number, Number];
mesh: Array<[Number, Number, Number]>;
meshRaw: Array<[Number, Number, Number]>;
annotations: Array<{
part: String;
points: Array<[Number, Number, Number]>[];
}>;
age: Number;
gender: String;
genderConfidence: Number;
emotion: Array<{
score: Number;
emotion: String;
}>;
embedding: Array<Number>;
iris: Number;
angle: {
roll: Number;
yaw: Number;
pitch: Number;
};
}>;
/** Body results
*
* Array of individual results with one object per detected body
 * Each result has:
* - body id number
* - body part name
* - part position with x,y,z coordinates
* - body part score value
* - body part presence value
*/
body: Array<{
id: Number;
part: String;
position: {
x: Number;
y: Number;
z: Number;
};
score: Number;
presence: Number;
}>;
/** Hand results
*
* Array of individual results with one object per detected hand
* Each result has:
* - confidence as value
* - box as array of [x, y, width, height], normalized to image resolution
* - boxRaw as array of [x, y, width, height], normalized to range 0..1
* - landmarks as array of [x, y, z] points of hand, normalized to image resolution
 * - annotations as array of annotated hand landmark points
*/
hand: Array<{
confidence: Number;
box: [Number, Number, Number, Number];
boxRaw: [Number, Number, Number, Number];
landmarks: Array<[Number, Number, Number]>;
annotations: Array<{
part: String;
points: Array<[Number, Number, Number]>[];
}>;
}>;
/** Gesture results
*
* Array of individual results with one object per detected gesture
* Each result has:
* - part where gesture was detected
* - gesture detected
*/
gesture: Array<{
part: String;
gesture: String;
}>;
/** Object results
*
 * Array of individual results with one object per detected object
* Each result has:
* - score as value
* - label as detected class name
* - center as array of [x, y], normalized to image resolution
* - centerRaw as array of [x, y], normalized to range 0..1
* - box as array of [x, y, width, height], normalized to image resolution
* - boxRaw as array of [x, y, width, height], normalized to range 0..1
*/
object: Array<{
score: Number;
strideSize: Number;
class: Number;
label: String;
center: Number[];
centerRaw: Number[];
box: Number[];
boxRaw: Number[];
}>;
performance: {
any: any;
};
canvas: OffscreenCanvas | HTMLCanvasElement;
}
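
A sketch of consuming a Result shaped like the interface above (which this commit moves from types/result.d.ts into the bundled dist/human.d.ts), producing one summary line per detected face and gesture; the import uses an assumed package name:

import type { Result } from '@vladmandic/human';   // assumed package name

function summarize(result: Result): string[] {
  const lines: string[] = [];
  for (const f of result.face) {
    const [x, y, width, height] = f.box;
    lines.push(`face ${Math.round(Number(f.confidence) * 100)}% at ${x},${y} size ${width}x${height} age ${f.age} gender ${f.gender}`);
  }
  for (const g of result.gesture) lines.push(`gesture: ${g.part} ${g.gesture}`);
  return lines;
}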

types/sample.d.ts (vendored, 2 lines)

File diff suppressed because one or more lines are too long

types/sysinfo.d.ts (vendored, deleted, 4 lines)

@@ -1,4 +0,0 @@
export declare function info(): {
platform: any;
agent: any;
};

@@ -1,19 +0,0 @@
export declare const config: {
name: string;
priority: number;
canvas: null;
gl: null;
width: number;
height: number;
webGLattr: {
alpha: boolean;
antialias: boolean;
premultipliedAlpha: boolean;
preserveDrawingBuffer: boolean;
depth: boolean;
stencil: boolean;
failIfMajorPerformanceCaveat: boolean;
desynchronized: boolean;
};
};
export declare function register(): void;