// mirror of https://github.com/vladmandic/human
declare module "log" {
  /** Logging helper; forwards any number of arguments of any type. */
  export function log(...msg: any[]): void;
}
declare module "sysinfo" {
  /** Returns runtime environment information: platform and agent identifiers. */
  export function info(): {
    platform: any;
    agent: any;
  };
}
|
||
|
declare module "tfjs/backend" {
  /** Configuration object for the custom TFJS backend. */
  export const config: {
    name: string;
    priority: number;
    canvas: null; // initialized lazily at runtime — null until then
    gl: null;     // initialized lazily at runtime — null until then
    width: number;
    height: number;
    /** WebGL context attributes used when the rendering context is created. */
    webGLattr: {
      alpha: boolean;
      antialias: boolean;
      premultipliedAlpha: boolean;
      preserveDrawingBuffer: boolean;
      depth: boolean;
      stencil: boolean;
      failIfMajorPerformanceCaveat: boolean;
      desynchronized: boolean;
    };
  };
  /** Registers the custom backend. */
  export function register(): void;
}
|
||
|
declare module "blazeface/blazeface" {
  /** Disposes resources held by a detection box. */
  export const disposeBox: (box: any) => void;
  /** Wrapper class for the BlazeFace face-detection model. */
  export class BlazeFaceModel {
    model: any;
    anchorsData: any;
    anchors: any;
    inputSize: number;
    config: any;
    constructor(model: any, config: any);
    /**
     * Detects faces in the input image.
     * Resolves with detected boxes (each with box, landmarks, anchor and
     * confidence) plus a scale factor pair, or null when nothing is detected.
     */
    getBoundingBoxes(inputImage: any): Promise<{
      boxes: {
        box: any;
        landmarks: any;
        anchor: any;
        confidence: number;
      }[];
      scaleFactor: number[];
    } | null>;
  }
  /** Loads the BlazeFace model using the provided configuration. */
  export function load(config: any): Promise<BlazeFaceModel>;
}
|
||
|
declare module "blazeface/box" {
  /** Scales a box's start/end coordinates by the given factor. */
  export function scaleBoxCoordinates(box: any, factor: any): {
    startPoint: number[];
    endPoint: number[];
  };
  /** Returns the box dimensions as a number pair. */
  export function getBoxSize(box: any): number[];
  /** Returns the box center coordinates. */
  export function getBoxCenter(box: any): any[];
  /** Crops the box region from an image and resizes it to cropSize. */
  export function cutBoxFromImageAndResize(box: any, image: any, cropSize: any): any;
  /** Enlarges a box by the given factor (optional, has a default). */
  export function enlargeBox(box: any, factor?: number): {
    startPoint: number[];
    endPoint: any[];
    landmarks: any;
  };
  /** Produces a square variant of the box, keeping its landmarks. */
  export function squarifyBox(box: any): {
    startPoint: number[];
    endPoint: any[];
    landmarks: any;
  };
}
|
||
|
declare module "blazeface/util" {
  /** 2D identity transform matrix. */
  export const IDENTITY_MATRIX: number[][];
  /**
   * Normalizes the provided angle to the range -pi to pi.
   * @param angle The angle in radians to be normalized.
   */
  export function normalizeRadians(angle: any): number;
  /**
   * Computes the angle of rotation between two anchor points.
   * @param point1 First anchor point
   * @param point2 Second anchor point
   */
  export function computeRotation(point1: any, point2: any): number;
  /** Converts radians to degrees. */
  export function radToDegrees(rad: any): number;
  /** Builds a 2D translation matrix for offset (x, y). */
  export function buildTranslationMatrix(x: any, y: any): any[][];
  /** Dot product of two vectors. */
  export function dot(v1: any, v2: any): number;
  /** Extracts a single column from a 2D array. */
  export function getColumnFrom2DArr(arr: any, columnIndex: any): number[];
  /** Multiplies two transform matrices. */
  export function multiplyTransformMatrices(mat1: any, mat2: any): number[][];
  /** Builds a rotation matrix for the given rotation about a center point. */
  export function buildRotationMatrix(rotation: any, center: any): number[][];
  /** Inverts a transform matrix. */
  export function invertTransformMatrix(matrix: any): any[][];
  /** Applies a rotation matrix to a homogeneous coordinate. */
  export function rotatePoint(homogeneousCoordinate: any, rotationMatrix: any): number[];
  /** Distance between two points in the x/y plane. */
  export function xyDistanceBetweenPoints(a: any, b: any): number;
}
|
||
|
declare module "blazeface/coords" {
  /** Named groups of face-mesh point indices (silhouette, lips, eyes, brows, nose, cheeks). */
  export const MESH_ANNOTATIONS: {
    silhouette: number[];
    lipsUpperOuter: number[];
    lipsLowerOuter: number[];
    lipsUpperInner: number[];
    lipsLowerInner: number[];
    rightEyeUpper0: number[];
    rightEyeLower0: number[];
    rightEyeUpper1: number[];
    rightEyeLower1: number[];
    rightEyeUpper2: number[];
    rightEyeLower2: number[];
    rightEyeLower3: number[];
    rightEyebrowUpper: number[];
    rightEyebrowLower: number[];
    rightEyeIris: number[];
    leftEyeUpper0: number[];
    leftEyeLower0: number[];
    leftEyeUpper1: number[];
    leftEyeLower1: number[];
    leftEyeUpper2: number[];
    leftEyeLower2: number[];
    leftEyeLower3: number[];
    leftEyebrowUpper: number[];
    leftEyebrowLower: number[];
    leftEyeIris: number[];
    midwayBetweenEyes: number[];
    noseTip: number[];
    noseBottom: number[];
    noseRightCorner: number[];
    noseLeftCorner: number[];
    rightCheek: number[];
    leftCheek: number[];
  };
  /** Keyed index lists mapping mesh points to iris-model points. */
  export const MESH_TO_IRIS_INDICES_MAP: {
    key: string;
    indices: number[];
  }[];
  // UV coordinate tables (UV*), triangle index tables (TRI*) and vertex
  // index tables (VTX*) for meshes with 468, 68, 33 and 7 points.
  export const UV468: number[][];
  export const TRI468: number[];
  export const TRI68: number[];
  export const TRI33: number[];
  export const TRI7: number[];
  export const VTX68: number[];
  export const VTX33: number[];
  export const VTX7: number[];
  export const UV68: number[][];
  export const UV33: number[][];
  export const UV7: number[][];
}
|
||
|
declare module "blazeface/facepipeline" {
  /** Combines face bounding-box detection, face mesh and iris models into one pipeline. */
  export class Pipeline {
    storedBoxes: any;
    boundingBoxDetector: any;
    meshDetector: any;
    irisModel: any;
    boxSize: number;
    meshSize: number;
    irisSize: number;
    irisEnlarge: number;
    skipped: number;       // counter of frames since the last full detection pass
    detectedFaces: number;
    constructor(boundingBoxDetector: any, meshDetector: any, irisModel: any);
    /** Transforms raw mesh coordinates into the space of the detected box and rotation. */
    transformRawCoords(rawCoords: any, box: any, angle: any, rotationMatrix: any): any;
    /** Depth difference between left and right eye coordinates. */
    getLeftToRightEyeDepthDifference(rawCoords: any): number;
    /** Extracts an eye region box and its crop from face coordinates. */
    getEyeBox(rawCoords: any, face: any, eyeInnerCornerIndex: any, eyeOuterCornerIndex: any, flip?: boolean): {
      box: {
        startPoint: number[];
        endPoint: any[];
        landmarks: any;
      };
      boxSize: number[];
      crop: any;
    };
    /** Converts raw eye-model output into coordinates plus iris points. */
    getEyeCoords(eyeData: any, eyeBox: any, eyeBoxSize: any, flip?: boolean): {
      rawCoords: any[][];
      iris: any[][];
    };
    /** Adjusts iris coordinates relative to the raw mesh coordinates. */
    getAdjustedIrisCoords(rawCoords: any, irisCoords: any, direction: any): any;
    /** Runs the full pipeline on an input using the given configuration. */
    predict(input: any, config: any): Promise<any>;
    /** Computes a bounding box that encloses the given landmarks. */
    calculateLandmarksBoundingBox(landmarks: any): {
      startPoint: number[];
      endPoint: number[];
      landmarks: any;
    };
  }
}
|
||
|
declare module "blazeface/facemesh" {
  /** High-level face-mesh model combining detector, mesh and iris models. */
  export class MediaPipeFaceMesh {
    facePipeline: any;
    config: any;
    constructor(blazeFace: any, blazeMeshModel: any, irisModel: any, config: any);
    /**
     * Estimates faces in the input.
     * Resolves with one result object per face: confidence values, box and
     * mesh coordinates (absolute and raw), annotations, and the input image.
     */
    estimateFaces(input: any, config: any): Promise<{
      confidence: any;
      boxConfidence: any;
      faceConfidence: any;
      box: any;
      mesh: any;
      boxRaw: any;
      meshRaw: any;
      annotations: any;
      image: any;
    }[]>;
  }
  /** Loads the face-mesh model composite using the provided configuration. */
  export function load(config: any): Promise<MediaPipeFaceMesh>;
}
|
||
|
declare module "profile" {
  /** Accumulated profiling data, keyed by profile name. */
  export const data: {};
  /** Records profiling information for a named operation. */
  export function run(name: string, raw: any): void;
}
declare module "age/age" {
  /** Loads the age-estimation model using the provided configuration. */
  export function load(config: any): Promise<any>;
  /** Runs age prediction on an input image. */
  export function predict(image: any, config: any): Promise<unknown>;
}
declare module "gender/gender" {
  /** Loads the gender-estimation model using the provided configuration. */
  export function load(config: any): Promise<any>;
  /** Runs gender prediction on an input image. */
  export function predict(image: any, config: any): Promise<unknown>;
}
declare module "emotion/emotion" {
  /** Loads the emotion-detection model using the provided configuration. */
  export function load(config: any): Promise<any>;
  /** Runs emotion prediction on an input image. */
  export function predict(image: any, config: any): Promise<unknown>;
}
|
||
|
declare module "embedding/embedding" {
|
||
|
type Tensor = {};
|
||
|
export function load(config: any): Promise<any>;
|
||
|
export function simmilarity(embedding1: any, embedding2: any, order?: number): Number;
|
||
|
export function match(embedding: Array<Number>, db: Array<any>, threshold?: number): {
|
||
|
simmilarity: number;
|
||
|
name: string;
|
||
|
source: string;
|
||
|
embedding: never[];
|
||
|
};
|
||
|
export function enhance(input: any): Tensor;
|
||
|
export function predict(input: any, config: any): Promise<number[]>;
|
||
|
}
|
||
|
declare module "posenet/modelBase" {
  /** Thin wrapper around a PoseNet base model. */
  export class BaseModel {
    model: any;
    constructor(model: any);
    /** Runs model inference on an input. */
    predict(input: any): any;
    /** Releases model resources. */
    dispose(): void;
  }
}
declare module "posenet/heapSort" {
  /** Binary max-heap priority queue with a caller-supplied value accessor. */
  export class MaxHeap {
    priorityQueue: any;
    numberOfElements: number;
    getElementValue: any; // function used to extract the comparison value of an element
    constructor(maxSize: any, getElementValue: any);
    /** Inserts an element into the heap. */
    enqueue(x: any): void;
    /** Removes and returns the maximum element. */
    dequeue(): any;
    /** True when the heap contains no elements. */
    empty(): boolean;
    /** Number of elements currently in the heap. */
    size(): number;
    /** Returns the underlying elements. */
    all(): any;
    /** Returns the maximum element without removing it. */
    max(): any;
    /** Restores heap order by moving element k up. */
    swim(k: any): void;
    /** Restores heap order by moving element k down. */
    sink(k: any): void;
    /** Value of the element at index i, via getElementValue. */
    getValueAt(i: any): any;
    /** Compares values at indices i and j. */
    less(i: any, j: any): boolean;
    /** Swaps elements at indices i and j. */
    exchange(i: any, j: any): void;
  }
}
|
||
|
declare module "posenet/buildParts" {
  import * as heapSort from "posenet/heapSort";
  /** Builds a max-heap of candidate body parts whose score passes the threshold. */
  export function buildPartWithScoreQueue(scoreThreshold: any, localMaximumRadius: any, scores: any): heapSort.MaxHeap;
}
declare module "posenet/keypoints" {
  /** Names of all body parts, in model output order. */
  export const partNames: string[];
  /** Total number of keypoints. */
  export const NUM_KEYPOINTS: any;
  /** Lookup from part name to its index. */
  export const partIds: any;
  /** Index pairs of parts that are connected in the skeleton. */
  export const connectedPartIndices: any[][];
  /** Part-name pairs forming the pose chain used during decoding. */
  export const poseChain: string[][];
  /** Part names in model channel order. */
  export const partChannels: string[];
}
|
||
|
declare module "posenet/vectors" {
  /** Reads the offset vector for a keypoint at heatmap position (y, x). */
  export function getOffsetPoint(y: any, x: any, keypoint: any, offsets: any): {
    y: any;
    x: any;
  };
  /** Converts a heatmap position plus offsets into image coordinates. */
  export function getImageCoords(part: any, outputStride: any, offsets: any): {
    x: any;
    y: any;
  };
  /** Creates an array of the given size filled with one element. */
  export function fillArray(element: any, size: any): any[];
  /** Clamps a value to the [min, max] range. */
  export function clamp(a: any, min: any, max: any): any;
  /** Squared Euclidean distance between (y1, x1) and (y2, x2). */
  export function squaredDistance(y1: any, x1: any, y2: any, x2: any): number;
  /** Component-wise sum of two {x, y} vectors. */
  export function addVectors(a: any, b: any): {
    x: any;
    y: any;
  };
  /** Clamps both components of an {x, y} vector to [min, max]. */
  export function clampVector(a: any, min: any, max: any): {
    y: any;
    x: any;
  };
}
|
||
|
declare module "posenet/decoders" {
  /** Confidence score of each keypoint read from the heatmap. */
  export function getPointsConfidence(heatmapScores: any, heatMapCoords: any): Float32Array;
  /** Offset vectors corresponding to heatmap coordinates. */
  export function getOffsetVectors(heatMapCoordsBuffer: any, offsetsBuffer: any): any;
  /** Converts heatmap coordinates to image-space points using stride and offsets. */
  export function getOffsetPoints(heatMapCoordsBuffer: any, outputStride: any, offsetsBuffer: any): any;
  /** 2D argmax over model output. */
  export function argmax2d(inputs: any): any;
}
declare module "posenet/decodePose" {
  /** Decodes a full pose starting from a root keypoint, following displacements. */
  export function decodePose(root: any, scores: any, offsets: any, outputStride: any, displacementsFwd: any, displacementsBwd: any): any[];
  /**
   * Decodes a single pose from heatmap scores and offsets.
   * Resolves with named keypoints (position, part, score) and an overall score.
   */
  export function decodeSinglePose(heatmapScores: any, offsets: any, minScore: any): Promise<{
    keypoints: {
      position: {
        y: any;
        x: any;
      };
      part: string;
      score: number;
    }[];
    score: number;
  }>;
}
declare module "posenet/decodeMultiple" {
  /** Decodes multiple poses with non-maximum suppression over candidate roots. */
  export function decodeMultiplePoses(scoresBuffer: any, offsetsBuffer: any, displacementsFwdBuffer: any, displacementsBwdBuffer: any, nmsRadius: any, maxDetections: any, scoreThreshold: any): {
    keypoints: any;
    score: number;
  }[];
}
|
||
|
declare module "posenet/util" {
  /** True when either keypoint's score is below the confidence threshold. */
  export function eitherPointDoesntMeetConfidence(a: any, b: any, minConfidence: any): boolean;
  /** Pairs of adjacent keypoints that both pass the confidence threshold. */
  export function getAdjacentKeyPoints(keypoints: any, minConfidence: any): any[];
  /** Bounding box enclosing all keypoints. */
  export function getBoundingBox(keypoints: any): any;
  /** Bounding box corners as {x, y} points. */
  export function getBoundingBoxPoints(keypoints: any): {
    x: any;
    y: any;
  }[];
  /** Downloads tensor data into buffers (fixed-size tuple of results). */
  export function toTensorBuffers3D(tensors: any): Promise<[unknown, unknown, unknown, unknown, unknown, unknown, unknown, unknown, unknown, unknown]>;
  /** Scales a pose's keypoints by the given y/x factors. */
  export function scalePose(pose: any, scaleY: any, scaleX: any): {
    score: any;
    keypoints: any;
  };
  /** Resizes an image to [targetH, targetW]. */
  export function resizeTo(image: any, [targetH, targetW]: [any, any]): any;
  /** Scales and flips poses from model input resolution back to image resolution. */
  export function scaleAndFlipPoses(poses: any, [height, width]: [any, any], [inputResolutionHeight, inputResolutionWidth]: [any, any]): any;
}
|
||
|
declare module "posenet/posenet" {
  /** High-level PoseNet body-pose model. */
  export class PoseNet {
    baseModel: any;
    inputSize: number;
    constructor(model: any);
    /** Estimates body poses in the input using the given configuration. */
    estimatePoses(input: any, config: any): Promise<unknown>;
    /** Releases model resources. */
    dispose(): void;
  }
  /** Loads the PoseNet model using the provided configuration. */
  export function load(config: any): Promise<PoseNet>;
}
|
||
|
declare module "handpose/box" {
  /** Returns the box dimensions as a number pair. */
  export function getBoxSize(box: any): number[];
  /** Returns the box center coordinates. */
  export function getBoxCenter(box: any): any[];
  /** Crops the box region from an image and resizes it to cropSize. */
  export function cutBoxFromImageAndResize(box: any, image: any, cropSize: any): any;
  /** Scales a box (and its palm landmarks) by the given factor. */
  export function scaleBoxCoordinates(box: any, factor: any): {
    startPoint: number[];
    endPoint: number[];
    palmLandmarks: any;
    confidence: any;
  };
  /** Enlarges a box by the given factor (optional, has a default). */
  export function enlargeBox(box: any, factor?: number): {
    startPoint: number[];
    endPoint: any[];
    palmLandmarks: any;
  };
  /** Produces a square variant of the box, keeping its palm landmarks. */
  export function squarifyBox(box: any): {
    startPoint: number[];
    endPoint: any[];
    palmLandmarks: any;
  };
  /** Shifts a box by the given shift factor. */
  export function shiftBox(box: any, shiftFactor: any): {
    startPoint: any[];
    endPoint: any[];
    palmLandmarks: any;
  };
}
|
||
|
declare module "handpose/handdetector" {
  /** Palm/hand bounding-box detector built on anchor-based model output. */
  export class HandDetector {
    model: any;
    anchors: any;
    anchorsTensor: any;
    inputSize: number;
    inputSizeTensor: any;
    doubleInputSizeTensor: any;
    constructor(model: any, inputSize: any, anchorsAnnotated: any);
    /** Normalizes raw box output relative to the input size. */
    normalizeBoxes(boxes: any): any;
    /** Normalizes raw palm landmarks for the anchor at the given index. */
    normalizeLandmarks(rawPalmLandmarks: any, index: any): any;
    /** Detects candidate hand boxes with palm landmarks and confidence. */
    getBoxes(input: any, config: any): Promise<{
      box: any;
      palmLandmarks: any;
      confidence: number;
    }[]>;
    /** Estimates hand bounds in the input image. */
    estimateHandBounds(input: any, config: any): Promise<{}[]>;
  }
}
|
||
|
declare module "handpose/util" {
  /** Normalizes an angle in radians to the range -pi to pi. */
  export function normalizeRadians(angle: any): number;
  /** Computes the rotation angle between two points. */
  export function computeRotation(point1: any, point2: any): number;
  /** Builds a 2D translation matrix for offset (x, y). */
  export const buildTranslationMatrix: (x: any, y: any) => any[][];
  /** Dot product of two vectors. */
  export function dot(v1: any, v2: any): number;
  /** Extracts a single column from a 2D array. */
  export function getColumnFrom2DArr(arr: any, columnIndex: any): number[];
  /** Multiplies two transform matrices. */
  export function multiplyTransformMatrices(mat1: any, mat2: any): number[][];
  /** Builds a rotation matrix for the given rotation about a center point. */
  export function buildRotationMatrix(rotation: any, center: any): number[][];
  /** Inverts a transform matrix. */
  export function invertTransformMatrix(matrix: any): any[][];
  /** Applies a rotation matrix to a homogeneous coordinate. */
  export function rotatePoint(homogeneousCoordinate: any, rotationMatrix: any): number[];
}
|
||
|
declare module "handpose/handpipeline" {
  /** Combines hand detection and landmark models into a single pipeline. */
  export class HandPipeline {
    handDetector: any;
    landmarkDetector: any;
    inputSize: number;
    storedBoxes: any;
    skipped: number;       // counter of frames since the last full detection pass
    detectedHands: number;
    constructor(handDetector: any, landmarkDetector: any, inputSize: any);
    /** Derives a hand box from palm landmarks and a rotation matrix. */
    getBoxForPalmLandmarks(palmLandmarks: any, rotationMatrix: any): {
      startPoint: number[];
      endPoint: any[];
      palmLandmarks: any;
    };
    /** Derives a hand box from full hand landmarks. */
    getBoxForHandLandmarks(landmarks: any): {
      startPoint: number[];
      endPoint: any[];
      palmLandmarks: any;
    };
    /** Transforms raw coordinates into the space of the detected box and rotation. */
    transformRawCoords(rawCoords: any, box2: any, angle: any, rotationMatrix: any): any;
    /** Runs the full hand pipeline on an image using the given configuration. */
    estimateHands(image: any, config: any): Promise<{}[]>;
    /** Computes a bounding box that encloses the given landmarks. */
    calculateLandmarksBoundingBox(landmarks: any): {
      startPoint: number[];
      endPoint: number[];
    };
  }
}
|
||
|
declare module "handpose/anchors" {
  /** Precomputed anchor boxes (width, height, center) for the hand detector. */
  export const anchors: {
    w: number;
    h: number;
    x_center: number;
    y_center: number;
  }[];
}
declare module "handpose/handpose" {
  /** High-level hand-pose model wrapping the hand pipeline. */
  export class HandPose {
    handPipeline: any;
    constructor(handPipeline: any);
    /** Landmark index groups per finger and palm base. */
    static getAnnotations(): {
      thumb: number[];
      indexFinger: number[];
      middleFinger: number[];
      ringFinger: number[];
      pinky: number[];
      palmBase: number[];
    };
    /**
     * Estimates hands in the input.
     * Resolves with one result per hand: confidence, box (absolute and raw),
     * landmarks, and named annotations.
     */
    estimateHands(input: any, config: any): Promise<{
      confidence: number;
      box: any;
      boxRaw: any;
      landmarks: any;
      annotations: any;
    }[]>;
  }
  /** Loads the hand-pose model using the provided configuration. */
  export function load(config: any): Promise<HandPose>;
}
|
||
|
declare module "blazepose/annotations" {
  /** Body-part names for the full-body BlazePose model variant. */
  export const full: string[];
  /** Body-part names for the upper-body BlazePose model variant. */
  export const upper: string[];
}
|
||
|
declare module "blazepose/blazepose" {
|
||
|
export function load(config: any): Promise<any>;
|
||
|
export function predict(image: any, config: any): Promise<{
|
||
|
keypoints: {
|
||
|
id: any;
|
||
|
part: any;
|
||
|
position: {
|
||
|
x;
|
||
|
y;
|
||
|
z;
|
||
|
};
|
||
|
score: any;
|
||
|
presence: any;
|
||
|
}[];
|
||
|
}[] | null>;
|
||
|
}
|
||
|
declare module "nanodet/nanodet" {
  /** Loads the NanoDet object-detection model using the provided configuration. */
  export function load(config: any): Promise<any>;
  /** Runs object detection on an input image. */
  export function predict(image: any, config: any): Promise<unknown>;
}
|
||
|
declare module "gesture/gesture" {
  /** Derives body gestures from detection results; one entry per gesture with its body index. */
  export const body: (res: any) => {
    body: number;
    gesture: string;
  }[];
  /** Derives face gestures from detection results; one entry per gesture with its face index. */
  export const face: (res: any) => {
    face: number;
    gesture: string;
  }[];
  /** Derives iris gestures from detection results; one entry per gesture with its iris index. */
  export const iris: (res: any) => {
    iris: number;
    gesture: string;
  }[];
  /** Derives hand gestures from detection results; one entry per gesture with its hand index. */
  export const hand: (res: any) => {
    hand: number;
    gesture: string;
  }[];
}
|
||
|
declare module "image/image" {
  import * as tf from '../../dist/tfjs.esm.js';
  /**
   * Pre-processes an input (applying configured filters) into a tensor
   * plus the canvas it was drawn on.
   */
  export function process(input: any, config: any): {
    tensor: tf.Tensor;
    canvas: OffscreenCanvas | HTMLCanvasElement;
  };
}
|
||
|
declare module "config" {
|
||
|
/**
|
||
|
* Configuration interface definition for **Human** library
|
||
|
*
|
||
|
* Contains all configurable parameters
|
||
|
*/
|
||
|
export interface Config {
|
||
|
backend: String;
|
||
|
wasmPath: String;
|
||
|
debug: Boolean;
|
||
|
async: Boolean;
|
||
|
profile: Boolean;
|
||
|
deallocate: Boolean;
|
||
|
scoped: Boolean;
|
||
|
videoOptimized: Boolean;
|
||
|
warmup: String;
|
||
|
filter: {
|
||
|
enabled: Boolean;
|
||
|
width: Number;
|
||
|
height: Number;
|
||
|
return: Boolean;
|
||
|
brightness: Number;
|
||
|
contrast: Number;
|
||
|
sharpness: Number;
|
||
|
blur: Number;
|
||
|
saturation: Number;
|
||
|
hue: Number;
|
||
|
negative: Boolean;
|
||
|
sepia: Boolean;
|
||
|
vintage: Boolean;
|
||
|
kodachrome: Boolean;
|
||
|
technicolor: Boolean;
|
||
|
polaroid: Boolean;
|
||
|
pixelate: Number;
|
||
|
};
|
||
|
gesture: {
|
||
|
enabled: Boolean;
|
||
|
};
|
||
|
face: {
|
||
|
enabled: Boolean;
|
||
|
detector: {
|
||
|
modelPath: String;
|
||
|
rotation: Boolean;
|
||
|
maxFaces: Number;
|
||
|
skipFrames: Number;
|
||
|
skipInitial: Boolean;
|
||
|
minConfidence: Number;
|
||
|
iouThreshold: Number;
|
||
|
scoreThreshold: Number;
|
||
|
return: Boolean;
|
||
|
};
|
||
|
mesh: {
|
||
|
enabled: Boolean;
|
||
|
modelPath: String;
|
||
|
};
|
||
|
iris: {
|
||
|
enabled: Boolean;
|
||
|
modelPath: String;
|
||
|
};
|
||
|
age: {
|
||
|
enabled: Boolean;
|
||
|
modelPath: String;
|
||
|
skipFrames: Number;
|
||
|
};
|
||
|
gender: {
|
||
|
enabled: Boolean;
|
||
|
minConfidence: Number;
|
||
|
modelPath: String;
|
||
|
skipFrames: Number;
|
||
|
};
|
||
|
emotion: {
|
||
|
enabled: Boolean;
|
||
|
minConfidence: Number;
|
||
|
skipFrames: Number;
|
||
|
modelPath: String;
|
||
|
};
|
||
|
embedding: {
|
||
|
enabled: Boolean;
|
||
|
modelPath: String;
|
||
|
};
|
||
|
};
|
||
|
body: {
|
||
|
enabled: Boolean;
|
||
|
modelPath: String;
|
||
|
maxDetections: Number;
|
||
|
scoreThreshold: Number;
|
||
|
nmsRadius: Number;
|
||
|
};
|
||
|
hand: {
|
||
|
enabled: Boolean;
|
||
|
rotation: Boolean;
|
||
|
skipFrames: Number;
|
||
|
skipInitial: Boolean;
|
||
|
minConfidence: Number;
|
||
|
iouThreshold: Number;
|
||
|
scoreThreshold: Number;
|
||
|
maxHands: Number;
|
||
|
landmarks: Boolean;
|
||
|
detector: {
|
||
|
modelPath: String;
|
||
|
};
|
||
|
skeleton: {
|
||
|
modelPath: String;
|
||
|
};
|
||
|
};
|
||
|
object: {
|
||
|
enabled: Boolean;
|
||
|
modelPath: String;
|
||
|
minConfidence: Number;
|
||
|
iouThreshold: Number;
|
||
|
maxResults: Number;
|
||
|
skipFrames: Number;
|
||
|
};
|
||
|
}
|
||
|
const config: Config;
|
||
|
export { config as defaults };
|
||
|
}
|
||
|
declare module "draw/draw" {
|
||
|
export const drawOptions: {
|
||
|
color: string;
|
||
|
labelColor: string;
|
||
|
shadowColor: string;
|
||
|
font: string;
|
||
|
lineHeight: number;
|
||
|
lineWidth: number;
|
||
|
pointSize: number;
|
||
|
roundRect: number;
|
||
|
drawPoints: Boolean;
|
||
|
drawLabels: Boolean;
|
||
|
drawBoxes: Boolean;
|
||
|
drawPolygons: Boolean;
|
||
|
fillPolygons: Boolean;
|
||
|
useDepth: Boolean;
|
||
|
useCurves: Boolean;
|
||
|
bufferedOutput: Boolean;
|
||
|
useRawBoxes: Boolean;
|
||
|
};
|
||
|
export function gesture(inCanvas: HTMLCanvasElement, result: Array<any>): Promise<void>;
|
||
|
export function face(inCanvas: HTMLCanvasElement, result: Array<any>): Promise<void>;
|
||
|
export function body(inCanvas: HTMLCanvasElement, result: Array<any>): Promise<void>;
|
||
|
export function hand(inCanvas: HTMLCanvasElement, result: Array<any>): Promise<void>;
|
||
|
export function object(inCanvas: HTMLCanvasElement, result: Array<any>): Promise<void>;
|
||
|
export function canvas(inCanvas: HTMLCanvasElement, outCanvas: HTMLCanvasElement): Promise<void>;
|
||
|
export function all(inCanvas: HTMLCanvasElement, result: any): Promise<void>;
|
||
|
}
|
||
|
declare module "result" {
|
||
|
/**
|
||
|
* Result interface definition for **Human** library
|
||
|
*
|
||
|
* Contains all possible detection results
|
||
|
*/
|
||
|
export interface Result {
|
||
|
/** Face results
|
||
|
* Combined results of face detector, face mesh, age, gender, emotion, embedding, iris models
|
||
|
* Some values may be null if specific model is not enabled
|
||
|
*
|
||
|
* Array of individual results with one object per detected face
|
||
|
* Each result has:
|
||
|
* - overal detection confidence value
|
||
|
* - box detection confidence value
|
||
|
* - mesh detection confidence value
|
||
|
* - box as array of [x, y, width, height], normalized to image resolution
|
||
|
* - boxRaw as array of [x, y, width, height], normalized to range 0..1
|
||
|
* - mesh as array of [x, y, z] points of face mesh, normalized to image resolution
|
||
|
* - meshRaw as array of [x, y, z] points of face mesh, normalized to range 0..1
|
||
|
* - annotations as array of annotated face mesh points
|
||
|
* - age as value
|
||
|
* - gender as value
|
||
|
* - genderConfidence as value
|
||
|
* - emotion as array of possible emotions with their individual scores
|
||
|
* - iris as distance value
|
||
|
* - angle as object with values for roll, yaw and pitch angles
|
||
|
*/
|
||
|
face: Array<{
|
||
|
confidence: Number;
|
||
|
boxConfidence: Number;
|
||
|
faceConfidence: Number;
|
||
|
box: [Number, Number, Number, Number];
|
||
|
boxRaw: [Number, Number, Number, Number];
|
||
|
mesh: Array<[Number, Number, Number]>;
|
||
|
meshRaw: Array<[Number, Number, Number]>;
|
||
|
annotations: Array<{
|
||
|
part: String;
|
||
|
points: Array<[Number, Number, Number]>[];
|
||
|
}>;
|
||
|
age: Number;
|
||
|
gender: String;
|
||
|
genderConfidence: Number;
|
||
|
emotion: Array<{
|
||
|
score: Number;
|
||
|
emotion: String;
|
||
|
}>;
|
||
|
embedding: Array<Number>;
|
||
|
iris: Number;
|
||
|
angle: {
|
||
|
roll: Number;
|
||
|
yaw: Number;
|
||
|
pitch: Number;
|
||
|
};
|
||
|
}>;
|
||
|
/** Body results
|
||
|
*
|
||
|
* Array of individual results with one object per detected body
|
||
|
* Each results has:
|
||
|
* - body id number
|
||
|
* - body part name
|
||
|
* - part position with x,y,z coordinates
|
||
|
* - body part score value
|
||
|
* - body part presence value
|
||
|
*/
|
||
|
body: Array<{
|
||
|
id: Number;
|
||
|
part: String;
|
||
|
position: {
|
||
|
x: Number;
|
||
|
y: Number;
|
||
|
z: Number;
|
||
|
};
|
||
|
score: Number;
|
||
|
presence: Number;
|
||
|
}>;
|
||
|
/** Hand results
|
||
|
*
|
||
|
* Array of individual results with one object per detected hand
|
||
|
* Each result has:
|
||
|
* - confidence as value
|
||
|
* - box as array of [x, y, width, height], normalized to image resolution
|
||
|
* - boxRaw as array of [x, y, width, height], normalized to range 0..1
|
||
|
* - landmarks as array of [x, y, z] points of hand, normalized to image resolution
|
||
|
* - annotations as array of annotated face landmark points
|
||
|
*/
|
||
|
hand: Array<{
|
||
|
confidence: Number;
|
||
|
box: [Number, Number, Number, Number];
|
||
|
boxRaw: [Number, Number, Number, Number];
|
||
|
landmarks: Array<[Number, Number, Number]>;
|
||
|
annotations: Array<{
|
||
|
part: String;
|
||
|
points: Array<[Number, Number, Number]>[];
|
||
|
}>;
|
||
|
}>;
|
||
|
/** Gesture results
|
||
|
*
|
||
|
* Array of individual results with one object per detected gesture
|
||
|
* Each result has:
|
||
|
* - part where gesture was detected
|
||
|
* - gesture detected
|
||
|
*/
|
||
|
gesture: Array<{
|
||
|
part: String;
|
||
|
gesture: String;
|
||
|
}>;
|
||
|
/** Object results
|
||
|
*
|
||
|
* Array of individual results with one object per detected gesture
|
||
|
* Each result has:
|
||
|
* - score as value
|
||
|
* - label as detected class name
|
||
|
* - center as array of [x, y], normalized to image resolution
|
||
|
* - centerRaw as array of [x, y], normalized to range 0..1
|
||
|
* - box as array of [x, y, width, height], normalized to image resolution
|
||
|
* - boxRaw as array of [x, y, width, height], normalized to range 0..1
|
||
|
*/
|
||
|
object: Array<{
|
||
|
score: Number;
|
||
|
strideSize: Number;
|
||
|
class: Number;
|
||
|
label: String;
|
||
|
center: Number[];
|
||
|
centerRaw: Number[];
|
||
|
box: Number[];
|
||
|
boxRaw: Number[];
|
||
|
}>;
|
||
|
performance: {
|
||
|
any: any;
|
||
|
};
|
||
|
canvas: OffscreenCanvas | HTMLCanvasElement;
|
||
|
}
|
||
|
}
|
||
|
declare module "sample" {
|
||
|
export const face = "\n/9j/4AAQSkZJRgABAQEAYABgAAD/4QBoRXhpZgAATU0AKgAAAAgABAEaAAUAAAABAAAAPgEbAAUA\nAAABAAAARgEoAAMAAAABAAIAAAExAAIAAAARAAAATgAAAAAAAABgAAAAAQAAAGAAAAABcGFpbnQu\nbmV0IDQuMi4xMwAA/9sAQwAGBAUGBQQGBgUGBwcGCAoQCgoJCQoUDg8MEBcUGBgXFBYWGh0lHxob\nIxwWFiAsICMmJykqKRkfLTAtKDAlKCko/9sAQwEHBwcKCAoTCgoTKBoWGigoKCgoKCgoKCgoKCgo\nKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgo/8AAEQgBAAEAAwEhAAIRAQMRAf/E\nAB8AAAEFAQEBAQEBAAAAAAAAAAABAgMEBQYHCAkKC//EALUQAAIBAwMCBAMFBQQEAAABfQECAwAE\nEQUSITFBBhNRYQcicRQygZGhCCNCscEVUtHwJDNicoIJChYXGBkaJSYnKCkqNDU2Nzg5OkNERUZH\nSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6g4SFhoeIiYqSk5SVlpeYmZqio6Slpqeoqaqys7S1\ntre4ubrCw8TFxsfIycrS09TV1tfY2drh4uPk5ebn6Onq8fLz9PX29/j5+v/EAB8BAAMBAQEBAQEB\nAQEAAAAAAAABAgMEBQYHCAkKC//EALURAAIBAgQEAwQHBQQEAAECdwABAgMRBAUhMQYSQVEHYXET\nIjKBCBRCkaGxwQkjM1LwFWJy0QoWJDThJfEXGBkaJicoKSo1Njc4OTpDREVGR0hJSlNUVVZXWFla\nY2RlZmdoaWpzdHV2d3h5eoKDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXG\nx8jJytLT1NXW19jZ2uLj5OXm5+jp6vLz9PX29/j5+v/aAAwDAQACEQMRAD8A+qaKACigApGOKAML\nXp8xlF5A7V4X8RtYs7PzfNImnx8sa8Kp9z3q2tEgp6angWs62ZZ5CTGoJ6DArGNz5p+UrID6EUrF\nPUlW1EuN0XNW7PQ2L5j3JnoKXN0KijqNP0eYoqXBdgPuuo+ZPeupisWn2Jd4+0r924XgsQOCff3/\nAJ1FzRKxDqGii6m3siiQ8F1XGfXI6YNWLfRbiRQMkcZI9fpTDluT2/h6Qy8gDPbtmtG38JeY480Z\n5zSLUTZg8M28YwYxjAArXtdPt402qgHbpSaLWhma3o0Uqk7Nx9DWLaaVblgPs6qRyds2M/gRSQp9\nzZOni2iWS2hlQ+kjYz9OMGrdjq89vIPPVhj+8M/lQyDq9P1WOYBlMZz1AOD+VdDaTiReOKulK0jO\ntHmi0WDTlr0TyxRVhT8tJjIX+9SUxHXUV553BRQAVBcPhSBTSuxPY86+IGti0s5I7dsORy9fM3i6\n8e8mfDO5P90ZrWWiJicNPpZZtxV/xrW0jQt4DOv6Vk2dEEdTY6BHuB25rpbPSo0QARjP0qTRI17W\nwA/hFaMWmoQMgflQXYsDS142rU9tpqqenfNA7GgtihxkdKuRW6qMY/GkDZY8sY4Ap4hXbyB+VArk\nEtuH4wPyrk/EGkOm+a3jw3suRQLc5i38SX9hJ9nnY+XnBUdPyNdFY6pa3KkkAE9l6f8AfJ/pSJT6\nGhDmI+Zb4ZRycdv6ium0nUhKFydrelTsNnS2829RnrVgV6NKXNG55lWPLIM81Op+WrZkRMfmNNzT\nA7GivPO4KKAEY4XNYWt3vkwPg4OK0giJdjw/xrqhm87Zs8tc7pX5A+leSajf6aHYJ50kn4AZpTep\nrBWRm2Vobm4BXfyehPFdnpmnBFUY5rI2SN63tlToK0YI+KZpFF+3QdavwoKTLtoW0Toaswpk5pCb\nLCxipAhoIuP2dKevHXoaYDylRyx
hlwRQI4nxVoCXWZI1GfpXGtbSWjYPGP73+NIGupt6TqMsLruZ\nih4xnP5V09mQ+JLd8gn0xSYJnVaVdkook69K34zuUGunDS3Rx4qOzHVIp4rrOMY3NJQI7GivPO8K\nKAILt9kZrz3xlebYiu8KCCWb0XvW0NFch6ysfO3jLVjfXLIn+pQkKorl7WxNxIPl71g2dUUdpo+l\npBGvHPet23iC8ihFosrxirkHQUFo0IF4FXI1O726CpKLacCrMJoJLYHAPpTwucHpSRJJ5e4AZI9x\nUqpxzVpCuOC8cUpQUMRnXttuB4rjNdsYyeVwfXpmpGmcvcQyafMCFJjPY10eg34BUg4DcZP8jUO4\nHaRq3lLNF+IHet7R7jz7c56rwa2wz9+xhiVeFy/T1PFegeaNPWigDsc0ZrzzvDNIaAM7VpNqdegr\nxL4l6kywyRhseZ19lrdfAZL4jxYg3Fw20d63tJsdrDI5rm3Z3R0R0Mce1eKnQYAplIkWrMJ45oZS\nNO3PHbNXIyfpSGWowSOasxLUiZdjFSqtNEMkUemKlAGKsRJjAppFAiORMjmsTVrNZEO4cfSoZSOD\n1eJ7WXBUzQZ+7nkfSo7e2Ei+ZaMzxntjBX2NSU1Y6/wxqojiEFzkA8KTXYaUoWRyv3W5rSjpNHPX\n+BmpSg8V6J5gUUAdhRXnneFFAGHrTfu5PpXzj8S70/aZtxzztXFbv4DKHxHI+H4GZiz9zxXXW8G3\nGBXMjvLRXAx0oPGPSmMVeOnWrMTYpFI0bcg1fh54xmgovRcD3qxETSIZcRvzp+/BpEkqsBUqsM9K\nq4Em4Gkxk0yRGXrVW6i8yFhkg+tJjRxGsWrxllkUMh9eK5uMz6bcebbnfG33kPcVkay2OntPKuo0\nnhXI67c8qa7Lw3c+adjcEDGK1paSRhVV4s6A0or0jyRRQ1AHX0V553hRQBz+vNtt5z3xXzX8Qbdm\nuic5YnOMdK3l8JnTXvlbwpYl+WySOgrp5YfLOOB9O1c62O7qQkc+9RsKChFPWp4DluOlSykaNruH\nArUgHShFNF2NT1qxGO3NBmyxGcE1N2560CFzjrUysO9JAPDDjFOVuKoQuSRTWouBkazbCa3cd8cV\nwF7IISQccHBzUSWpV9C3o1x5b5GAjdQD1rs9DjC3kckbEhqKfxIzn8LOupRXqnkPccBSkUAzraK8\n87wooA5rxMSI3HqK8B8bQl9Q8sffY5b/AAraXwkUviNrw9pH2W1ViMMRTdRjw4HpWNtDti9TPc4P\nFQs2M5qdyyMHLcfjV63HTAoBGtap0wK0YxigpsuRDtVhVYd6GQydVwwIqdRnqKCR23I5pCMUW6gD\nYNKuetAEise9KTxQBWuFyhrznxNZkXjFeN3I+tTIZg2OqmzmxNF0PO3vXp/g2+hukVl4zyPanTXv\nJmVR+60dpThXpnlPceopWFAbnV0V553hSGgRynjC5FujOey14Ssp1HxNmTnc+a3kvcIpv37HoEYQ\nQmMdVHSsnVbYJF5jVk0dsNzlruVIsl2wKxbjWrVHILjg1CRbZJb+ILHPzyhfStODWLQgFJFYd+el\nUJM27HUIXxhga1Y5lLVLKLkMnoauxnPPrSEx7ShF+Y/n2qrc6xBbhizDAqkK1zJuvG9nbg8ZA681\nly/Ei052RO3uKAsZlx8QGd8xxvt9Aa1NH8dK7AXMcip64zigdkdrZX8F7EJLdwwNXMkrz1qRMRly\nCK4TxmpidWI49felPYSOMmi80NIoOV6qRzXYeA5SskYPfirpfEjGr8LPWVHyD6U4CvQPL3ZItOYc\nUDOoNFeed4Uhpks4H4iE/Z5MeleMeGULeLgjds10S+BGdL+Jc9OSBU2Huc5Nc74yvUtrcDBrJnZF\n63PJdXvLy/lKWw46bvQVz82jXhkLO5Y+9ZlsYthcRnbIjY9R3q3awTRkEM3WmJI6C0ea3dGRsr1x\
nXY6TqW9FLHnjrUs0izpLK5DDjofSta3ckH09KRUkZuuTvFGdvPauE1Y3U6Mqbssf/rUxHPTaJPK2\nZmJPbBqzY6DC
|
||
|
export const body = "\n/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAsICAoIBwsKCQoNDAsNERwSEQ8PESIZGhQcKSQrKigk\nJyctMkA3LTA9MCcnOEw5PUNFSElIKzZPVU5GVEBHSEX/2wBDAQwNDREPESESEiFFLicuRUVFRUVF\nRUVFRUVFRUVFRUVFRUVFRUVFRUVFRUVFRUVFRUVFRUVFRUVFRUVFRUVFRUX/wAARCASwBLADASIA\nAhEBAxEB/8QAGwABAAIDAQEAAAAAAAAAAAAAAAEDAgQFBgf/xABDEAEAAgECBAMECQIDBgUFAQAA\nAQIDBBEFEiExE0FRBiJhcRQjMkJSgZGhsWLBJDNyFSVTY3OSNEPR4fAHFjWCokT/xAAYAQEAAwEA\nAAAAAAAAAAAAAAAAAQIDBP/EACARAQEBAQADAQEBAQEBAAAAAAABAhEDITFBEjJRIhP/2gAMAwEA\nAhEDEQA/APqYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAKNTq8OkxzfNkisQC8eb1XtRNbzXT4q7eU2nu0MntRq/D8StMccvW29ZmdvgjsTyvZjxOLj\n+s8WLxn8TFPXs6Oj9oct7c14rkxz22nrB2I49KOdTjelmszfmpMeUxv/AA28OqwZ4icWWtt/SUi4\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAmdo3nsPNe0Pt\nFh09Z0+DNWL7+9O/7A3eJcZppsV5raI27esvH6jX5ddM25p79Ilo59VbUZOe2Tm/PeGvfPfT2iKR\nPLv1+DO678XmW/a97U6TtOyzTbTF538/T9WjTNecm9a7126tqk3rSYxY5ta1plRZqZNXGjyZcPXl\nmZmsx+qjBrsuO16xM7eXRt04JrdTltk5OWJnfaWf0a2lty5MdZnfzSn+WOHiOutFpjHa9e8bQ2fp\n+alYy462pk7zXbuxjPesbRS0f6ZZV1ET1tErzXFLHo+A+1ddZf6NrI8PJHa1vN6iJi0bxMTHwfOa\nzhzd61v1846utwniM6DUdb3nBaNrVmd9vjC/ZVePYirBqMWppz4rxaPgtEAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAItaK1m09ojcHnvarjM8P0vh49+a/eY8ng9D\nh1fGM1rxjtGPfvbzdbjuTJxHX48cTPNltM/KsS9Dw7S49Jp6UpHaGe2vjz1y9J7LYK13vHWe7bj2\nex1tvM80ekuxW3RnW3Vm6P5jRx8H0+OYmMcb+bapo8GKPdpC6bQwtdHU8JpWkdJ/JweL6e23iU67\nd4dubSqyVi9Zi0bwIs68XGp36TtEq7ZJmZmevzdbifCKWtbJinkt6eTgZPFw32t+sRurbWVzxs1y\nRv6T8V1NZNPtfq0seTm+Kevr+SZuxXjvaPiV8N4viycto9HseG6+uu08W6Rkj7UPmFck1tE1nlmP\nLd3eA8V8HVVi1pjq6Ma/pnqce/ERMTETHaUrKgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAADW19+TQ5p/p2bLS4v04Zmt5VjeQeJ4bjnLqsupv+Ka1+ERLv4reTmcNxcuC\nvy3l0qdI2hlr66sT02ot0ZV7qqr
InruzrVZLGSZ37JjqgYTG0K5lbaFVhDT1Ub456RPweY4hixWi\neSdpjvD1eWejz3FNHWYtkpvFo9EIseb3tS3SerOms22rfpPqZKzvvHSYUz70TExG6Gdbs2rljeJ/\nMx5L0vEzPaelnOi98c9J2bFNTFpit47+a+PVUvx9T9nOIfT+GV5p3yY/ds67wvsXqpxau+G09Lx+\nr3TqrEAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADV4ljnLw3U0jvO\nO0fs2lWqyUw6XLkyfYrWZkHldBEV09eveG3Fq1mI3jd4vPrOIaid8G9MP3Y38k6fNrt/rMk9Ou8s\ntfXXn49rGWInuy8SO/k5Gl1E3rG/fzbOe94wTy99mbRvTrMOOvNfJWsesywniukrG/jU6fF43WYN\nTmtEeJtEQ06aSmK2+bNtEd+qfSO17unF9Hmvy1y13XWyVmN4tExLxVK8PmNq5NrT58zawam+m/yc\n0Xj8NpRYSvQZ7xEOdqI3rPozxayNRXe0ct/ON03jmrKB5nV4q1yTO20Obmv4c+cx8HoeI6WZpNoj\nq83niYmYscU0r8aJ6T1n49zeJ+Meqm1drb9J+Kd5p136StGVem9l9TbHxLDFp7W7+sS+q1nesT6w\n+PcAzVjiGHftzQ+v4f8AJpv6On8jH9ZgIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAABp8VrW/C9TW0ztOO3b5Nxp8VmI4bn37TWYB8f1HFtTfUfR9FWJmsdZ9I7MtJxDX5s\nd8ta1y0xzteaR2277rcuhycP12SceLxMeWNpjttHwlu8I0mfQ1y+D7k5YmJmY36T36Ka43z/AF1t\ncI1ds+qxVj7/AEej19PCw9HJ4NoK4OIU5Y35YmZdzVTGebVZabx5jJS+Tmns81rNLm1Wrzc9rVw4\nYibbem72mXTTS0w0M3BvEta1bWrM95ie5EanY87wXgNOL6XPfxraXLhra/W28bR/dzYzarBqJxRe\nbzE7Rt5vWU9n8mPHOGmS0Ypnea1naJb+k9ncNLR7u2y/WcxXO4TOoyUrN6zD0FaW5Y3hu49FiwUi\nKxCvLMR0hlW0jn6ukWw3iXjOJzbDlneOj3GaN6zDzfFOH+LE7SRGo83XNSZ2lbG2/WfdlvaT2cy6\nrNFInlrv1mfJ37cK4PwTTxOoidRm2+/2/KFuyMp47XB4LivXiunrH2b2iH2qn2K/J8x4fGDNxTSZ\n9Nh8OviRvTyfT6xtWI+DeXs9MNZubypASqAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAOZx6/LoOWPvWiHTcf2hiZ0e8fc2mf1E5+vP/AEeuSd7RC2uKtI6QjHfeINTfwtPf\nJvty9WPfbt/lucP03gxfJf7d/wBoReYpm97zaNeLb4Ims9Nt94auDjem1Wo5PFi1onylS+1o7l8V\nbxvtupjDMdNkYtXS1+Stt+m63xImEJ4xjHER2ZxMUjeUTO3VRmydBbjLJqPi08mbeVOXJPq1sl5Q\nVbkz9+rRy35rxHqzmZlVEe/Ez5LRlW5iyfR6zffaIjq1OSNZps2a21rZInafSPJhxGMl9LStLRWM\nlorM/A4dkrWbYfLZC2W/7K6eubX6b4RzT+W76K8b7G6X62cu3Sten59nsm3j+OXz3/0ANGIAAAAA\
nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAA
|
||
|
}
|
||
|
declare module "human" {
|
||
|
import * as tf from '../dist/tfjs.esm.js';
|
||
|
import * as facemesh from "blazeface/facemesh";
|
||
|
import * as age from "age/age";
|
||
|
import * as gender from "gender/gender";
|
||
|
import * as emotion from "emotion/emotion";
|
||
|
import * as posenet from "posenet/posenet";
|
||
|
import * as handpose from "handpose/handpose";
|
||
|
import * as blazepose from "blazepose/blazepose";
|
||
|
import * as nanodet from "nanodet/nanodet";
|
||
|
import * as draw from "draw/draw";
|
||
|
import { Config } from "config";
|
||
|
import { Result } from "result";
|
||
|
type Tensor = {};
|
||
|
type Model = {};
|
||
|
export type { Config } from "config";
|
||
|
export type { Result } from "result";
|
||
|
/** Defines all possible input types for **Human** detection */
|
||
|
export type Input = Tensor | ImageData | ImageBitmap | HTMLVideoElement | HTMLCanvasElement | OffscreenCanvas;
|
||
|
/** Error message */
|
||
|
export type Error = {
|
||
|
error: String;
|
||
|
};
|
||
|
export type TensorFlow = typeof tf;
|
||
|
/**
|
||
|
* **Human** library main class
|
||
|
*
|
||
|
* All methods and properties are available only as members of Human class
|
||
|
*
|
||
|
* - Configuration object definition: {@link Config}
|
||
|
* - Results object definition: {@link Result}
|
||
|
* - Possible inputs: {@link Input}
|
||
|
*/
|
||
|
export class Human {
|
||
|
#private;
|
||
|
version: String;
|
||
|
config: Config;
|
||
|
state: String;
|
||
|
image: {
|
||
|
tensor: Tensor;
|
||
|
canvas: OffscreenCanvas | HTMLCanvasElement;
|
||
|
};
|
||
|
tf: TensorFlow;
|
||
|
draw: {
|
||
|
drawOptions?: typeof draw.drawOptions;
|
||
|
gesture: typeof draw.gesture;
|
||
|
face: typeof draw.face;
|
||
|
body: typeof draw.body;
|
||
|
hand: typeof draw.hand;
|
||
|
canvas: typeof draw.canvas;
|
||
|
all: typeof draw.all;
|
||
|
};
|
||
|
models: {
|
||
|
face: facemesh.MediaPipeFaceMesh | null;
|
||
|
posenet: posenet.PoseNet | null;
|
||
|
blazepose: Model | null;
|
||
|
handpose: handpose.HandPose | null;
|
||
|
iris: Model | null;
|
||
|
age: Model | null;
|
||
|
gender: Model | null;
|
||
|
emotion: Model | null;
|
||
|
embedding: Model | null;
|
||
|
nanodet: Model | null;
|
||
|
};
|
||
|
classes: {
|
||
|
facemesh: typeof facemesh;
|
||
|
age: typeof age;
|
||
|
gender: typeof gender;
|
||
|
emotion: typeof emotion;
|
||
|
body: typeof posenet | typeof blazepose;
|
||
|
hand: typeof handpose;
|
||
|
nanodet: typeof nanodet;
|
||
|
};
|
||
|
sysinfo: {
|
||
|
platform: String;
|
||
|
agent: String;
|
||
|
};
|
||
|
constructor(userConfig?: Config | Object);
|
||
|
profileData(): {
|
||
|
newBytes: any;
|
||
|
newTensors: any;
|
||
|
peakBytes: any;
|
||
|
numKernelOps: any;
|
||
|
timeKernelOps: any;
|
||
|
slowestKernelOps: any;
|
||
|
largestKernelOps: any;
|
||
|
} | {};
|
||
|
simmilarity(embedding1: Array<Number>, embedding2: Array<Number>): Number;
|
||
|
enhance(input: Tensor): Tensor | null;
|
||
|
match(faceEmbedding: Array<Number>, db: Array<{
|
||
|
name: String;
|
||
|
source: String | undefined;
|
||
|
embedding: Array<Number>;
|
||
|
}>, threshold?: number): {
|
||
|
name: String;
|
||
|
source: String | undefined;
|
||
|
simmilarity: Number;
|
||
|
embedding: Array<Number>;
|
||
|
};
|
||
|
load(userConfig?: Config | Object): Promise<void>;
|
||
|
detect(input: Input, userConfig?: Config | Object): Promise<Result | Error>;
|
||
|
warmup(userConfig?: Config | Object): Promise<Result | {
|
||
|
error: any;
|
||
|
}>;
|
||
|
}
|
||
|
/**
|
||
|
* Class Human is also available as default export
|
||
|
*/
|
||
|
export { Human as default };
|
||
|
}
|