add simple webcam and webrtc demo

pull/193/head
Vladimir Mandic 2021-09-15 13:59:18 -04:00
parent c495d93dc8
commit 6e85d638bf
24 changed files with 1832 additions and 181 deletions

View File

@ -9,8 +9,9 @@
## Changelog
### **HEAD -> main** 2021/09/14 mandic00@live.com
### **HEAD -> main** 2021/09/15 mandic00@live.com
- reorganize tfjs bundle
- experimental custom tfjs bundle - disabled
- add platform and backend capabilities detection
- enhanced automated tests

View File

@ -9,6 +9,22 @@ async function log(...msg) {
}
}
/**
 * helper implementation of webrtc
 * performs:
 * - discovery
 * - handshake
 * - connect to webrtc stream
 * - assign webrtc stream to video element
 *
 * for development purposes i'm using a test webrtc server that reads an rtsp stream from a security camera:
 * <https://github.com/vladmandic/stream-rtsp>
 *
 * @param {string} server
 * @param {string} streamName
 * @param {HTMLVideoElement} elementName
 * @return {Promise}
 */
async function webRTC(server, streamName, elementName) {
const suuid = streamName;
log('client starting');

View File

@ -904,7 +904,6 @@ async function pwaRegister() {
}
async function main() {
/*
window.addEventListener('unhandledrejection', (evt) => {
// eslint-disable-next-line no-console
console.error(evt.reason || evt);
@ -912,7 +911,6 @@ async function main() {
status('exception error');
evt.preventDefault();
});
*/
log('demo starting ...');

25
demo/simple/index.html Normal file
View File

@ -0,0 +1,25 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Human</title>
<meta name="viewport" content="width=device-width" id="viewport">
<meta name="keywords" content="Human">
<meta name="application-name" content="Human">
<meta name="description" content="Human: 3D Face Detection, Body Pose, Hand & Finger Tracking, Iris Tracking, Age & Gender Prediction, Emotion Prediction & Gesture Recognition; Author: Vladimir Mandic <https://github.com/vladmandic>">
<meta name="msapplication-tooltip" content="Human: 3D Face Detection, Body Pose, Hand & Finger Tracking, Iris Tracking, Age & Gender Prediction, Emotion Prediction & Gesture Recognition; Author: Vladimir Mandic <https://github.com/vladmandic>">
<meta name="theme-color" content="#000000">
<link rel="manifest" href="../manifest.webmanifest">
<link rel="shortcut icon" href="../../favicon.ico" type="image/x-icon">
<link rel="apple-touch-icon" href="../../assets/icon.png">
<!-- demo entry point; loaded as an ES module so it can import the human library -->
<script src="./index.js" type="module"></script>
<style>
body { margin: 0; background: black; color: white; overflow-x: hidden; width: 100vw; height: 100vh; text-align: center; }
body::-webkit-scrollbar { display: none; }
</style>
</head>
<body>
<!-- output canvas: video frame plus detection overlays are drawn here by index.js -->
<canvas id="canvas" style="margin: 0 auto;"></canvas>
<!-- input element: receives the webcam/webrtc stream; hidden because only the canvas is shown -->
<video id="video" playsinline style="display: none;"></video>
</body>
</html>

77
demo/simple/index.js Normal file
View File

@ -0,0 +1,77 @@
import Human from '../../dist/human.esm.js'; // equivalent of @vladmandic/human
import webRTC from '../helpers/webrtc.js'; // handle webrtc handshake and connects to webrtc stream
// library configuration: everything uses default values except the model location
const config = {
  modelBasePath: '../../models',
};
const human = new Human(config);

// webrtc input settings; when `enabled` is false the local webcam is used instead
const webrtc = {
  enabled: false, // use webrtc or use webcam if disabled
  server: 'http://human.local:8002',
  stream: 'reowhite',
};

// eslint-disable-next-line no-console
const log = (...msg) => console.log(...msg);

/** @type {HTMLVideoElement} */
// @ts-ignore
const video = document.getElementById('video') ?? document.createElement('video'); // input element; fallback keeps the demo running outside the bundled page
/** @type {HTMLCanvasElement} */
// @ts-ignore
const canvas = document.getElementById('canvas') ?? document.createElement('canvas'); // output element; fallback keeps the demo running outside the bundled page
/** Initializes the local webcam as the input stream and sizes the output canvas to match
 * - requests a user-facing camera at the ideal width of the page
 * - resolves only after the stream has delivered its first frame so dimensions are known
 * @returns {Promise<void>}
 */
async function webCam() {
  const constraints = { audio: false, video: { facingMode: 'user', resizeMode: 'none', width: { ideal: document.body.clientWidth } } }; // set preferred camera options
  const stream = await navigator.mediaDevices.getUserMedia(constraints); // get webcam stream that matches constraints
  const ready = new Promise((resolve) => { video.onloadeddata = () => resolve(true); }); // resolve when stream is ready
  video.srcObject = stream; // assign stream to video element
  await video.play(); // start stream; awaited so autoplay/playback rejections surface instead of being silently swallowed
  await ready; // wait until stream is ready
  canvas.width = video.videoWidth; // resize output canvas to match input
  canvas.height = video.videoHeight;
  log('video stream:', video.srcObject, 'track state:', video.srcObject.getVideoTracks()[0].readyState, 'stream state:', video.readyState);
}
// last detection result, shared between the detection loop and the draw loop
// eslint-disable-next-line no-unused-vars
let result;

/** Continuously runs detection; each completed pass updates the shared result */
async function detectionLoop() {
  result = await human.detect(video); // updates result every time detection completes
  requestAnimationFrame(detectionLoop); // schedule the next detection pass
}
/** Continuously draws interpolated results so output appears smoother than raw detection rate */
// eslint-disable-next-line no-unused-vars
async function drawLoop() {
  const interpolated = await human.next(result); // interpolate from the last known detection result
  await human.draw.canvas(video, canvas); // copy the current video frame onto the output canvas
  await human.draw.all(canvas, interpolated); // overlay the interpolated results on top
  requestAnimationFrame(drawLoop); // schedule the next frame
}
/** Alternative single loop: detect and draw in one pass (no interpolation between detections) */
// eslint-disable-next-line no-unused-vars
async function singleLoop() {
  result = await human.detect(video); // run detection on the current frame
  await human.draw.canvas(video, canvas); // copy the current video frame onto the output canvas
  await human.draw.all(canvas, result); // overlay the raw results on top
  requestAnimationFrame(singleLoop); // schedule the next pass
}
/** Demo entry point: loads models, selects input source, then starts the processing loops */
async function main() {
  await human.load(); // not required, pre-loads all models
  await human.warmup(); // not required, warms up all models
  if (webrtc.enabled) await webRTC(webrtc.server, webrtc.stream, video); // setup webrtc as input stream, uses helper implementation in ../helpers/webrtc.js
  else await webCam(); // setup webcam as input stream
  // preferred: run in two loops, one for actual detection and one that draws interpolated results on screen so results appear much smoother
  await detectionLoop();
  await drawLoop();
  // alternative: run in single loop where we run detection and then draw results
  // await singleLoop();
}
window.onload = main;

View File

@ -4456,10 +4456,16 @@ function process2(input, config3) {
else
throw new Error(`Human: Input tensor shape must be [1, height, width, 3] and instead was ${input.shape}`);
} else {
if (typeof input["readyState"] !== "undefined" && input["readyState"] <= 2) {
log("input stream is not ready");
return { tensor: null, canvas: inCanvas };
}
const originalWidth = input["naturalWidth"] || input["videoWidth"] || input["width"] || input["shape"] && input["shape"][1] > 0;
const originalHeight = input["naturalHeight"] || input["videoHeight"] || input["height"] || input["shape"] && input["shape"][2] > 0;
if (!originalWidth || !originalHeight)
if (!originalWidth || !originalHeight) {
log("cannot determine input dimensions");
return { tensor: null, canvas: inCanvas };
}
let targetWidth = originalWidth;
let targetHeight = originalHeight;
if (targetWidth > maxSize) {
@ -10935,18 +10941,17 @@ async function person(inCanvas2, result, drawOptions) {
}
}
}
async function canvas2(inCanvas2, outCanvas2) {
if (!inCanvas2 || !outCanvas2)
async function canvas2(input, output) {
if (!input || !output)
return;
getCanvasContext(outCanvas2);
const ctx = getCanvasContext(inCanvas2);
ctx.drawImage(inCanvas2, 0, 0);
const ctx = getCanvasContext(output);
ctx.drawImage(input, 0, 0);
}
async function all(inCanvas2, result, drawOptions) {
if (!result || !result.performance || !result || !inCanvas2)
return null;
const timestamp = now();
const localOptions = mergeDeep(options2, drawOptions);
if (!result || !inCanvas2)
return null;
const promise = Promise.all([
face2(inCanvas2, result.face, localOptions),
body2(inCanvas2, result.body, localOptions),
@ -12143,8 +12148,6 @@ var Human = class {
var _a;
return (_a = this.events) == null ? void 0 : _a.dispatchEvent(new Event(event));
});
__publicField(this, "next", (result) => calc(result || this.result));
__publicField(this, "warmup", (userConfig) => warmup(this, userConfig));
get();
this.env = env;
config.wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tfjs_esm_exports.version_core}/dist/`;
@ -12187,7 +12190,7 @@ var Human = class {
similarity(embedding1, embedding2) {
return similarity(embedding1, embedding2);
}
segmentation(input, background) {
async segmentation(input, background) {
return input ? process5(input, background, this.config) : null;
}
enhance(input) {
@ -12229,6 +12232,12 @@ var Human = class {
if (current > (this.performance.load || 0))
this.performance.load = current;
}
next(result = this.result) {
return calc(result);
}
async warmup(userConfig) {
return warmup(this, userConfig);
}
async detect(input, userConfig) {
return new Promise(async (resolve) => {
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;

File diff suppressed because one or more lines are too long

31
dist/human.esm.js vendored
View File

@ -64513,10 +64513,16 @@ function process2(input2, config3) {
else
throw new Error(`Human: Input tensor shape must be [1, height, width, 3] and instead was ${input2.shape}`);
} else {
if (typeof input2["readyState"] !== "undefined" && input2["readyState"] <= 2) {
log("input stream is not ready");
return { tensor: null, canvas: inCanvas };
}
const originalWidth = input2["naturalWidth"] || input2["videoWidth"] || input2["width"] || input2["shape"] && input2["shape"][1] > 0;
const originalHeight = input2["naturalHeight"] || input2["videoHeight"] || input2["height"] || input2["shape"] && input2["shape"][2] > 0;
if (!originalWidth || !originalHeight)
if (!originalWidth || !originalHeight) {
log("cannot determine input dimensions");
return { tensor: null, canvas: inCanvas };
}
let targetWidth = originalWidth;
let targetHeight = originalHeight;
if (targetWidth > maxSize) {
@ -70992,18 +70998,17 @@ async function person(inCanvas2, result, drawOptions) {
}
}
}
async function canvas2(inCanvas2, outCanvas2) {
if (!inCanvas2 || !outCanvas2)
async function canvas2(input2, output) {
if (!input2 || !output)
return;
getCanvasContext(outCanvas2);
const ctx = getCanvasContext(inCanvas2);
ctx.drawImage(inCanvas2, 0, 0);
const ctx = getCanvasContext(output);
ctx.drawImage(input2, 0, 0);
}
async function all5(inCanvas2, result, drawOptions) {
if (!result || !result.performance || !result || !inCanvas2)
return null;
const timestamp = now();
const localOptions = mergeDeep(options2, drawOptions);
if (!result || !inCanvas2)
return null;
const promise = Promise.all([
face2(inCanvas2, result.face, localOptions),
body2(inCanvas2, result.body, localOptions),
@ -72200,8 +72205,6 @@ var Human = class {
var _a;
return (_a = this.events) == null ? void 0 : _a.dispatchEvent(new Event(event));
});
__publicField(this, "next", (result) => calc(result || this.result));
__publicField(this, "warmup", (userConfig) => warmup(this, userConfig));
get3();
this.env = env2;
config.wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${version}/dist/`;
@ -72244,7 +72247,7 @@ var Human = class {
similarity(embedding1, embedding2) {
return similarity(embedding1, embedding2);
}
segmentation(input2, background) {
async segmentation(input2, background) {
return input2 ? process5(input2, background, this.config) : null;
}
enhance(input2) {
@ -72286,6 +72289,12 @@ var Human = class {
if (current > (this.performance.load || 0))
this.performance.load = current;
}
next(result = this.result) {
return calc(result);
}
async warmup(userConfig) {
return warmup(this, userConfig);
}
async detect(input2, userConfig) {
return new Promise(async (resolve) => {
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;

File diff suppressed because one or more lines are too long

162
dist/human.js vendored

File diff suppressed because one or more lines are too long

View File

@ -4490,10 +4490,16 @@ function process2(input, config3) {
else
throw new Error(`Human: Input tensor shape must be [1, height, width, 3] and instead was ${input.shape}`);
} else {
if (typeof input["readyState"] !== "undefined" && input["readyState"] <= 2) {
log("input stream is not ready");
return { tensor: null, canvas: inCanvas };
}
const originalWidth = input["naturalWidth"] || input["videoWidth"] || input["width"] || input["shape"] && input["shape"][1] > 0;
const originalHeight = input["naturalHeight"] || input["videoHeight"] || input["height"] || input["shape"] && input["shape"][2] > 0;
if (!originalWidth || !originalHeight)
if (!originalWidth || !originalHeight) {
log("cannot determine input dimensions");
return { tensor: null, canvas: inCanvas };
}
let targetWidth = originalWidth;
let targetHeight = originalHeight;
if (targetWidth > maxSize) {
@ -10995,18 +11001,17 @@ async function person(inCanvas2, result, drawOptions) {
}
}
}
async function canvas2(inCanvas2, outCanvas2) {
if (!inCanvas2 || !outCanvas2)
async function canvas2(input, output) {
if (!input || !output)
return;
getCanvasContext(outCanvas2);
const ctx = getCanvasContext(inCanvas2);
ctx.drawImage(inCanvas2, 0, 0);
const ctx = getCanvasContext(output);
ctx.drawImage(input, 0, 0);
}
async function all(inCanvas2, result, drawOptions) {
if (!result || !result.performance || !result || !inCanvas2)
return null;
const timestamp = now();
const localOptions = mergeDeep(options2, drawOptions);
if (!result || !inCanvas2)
return null;
const promise = Promise.all([
face2(inCanvas2, result.face, localOptions),
body2(inCanvas2, result.body, localOptions),
@ -12206,8 +12211,6 @@ var Human = class {
var _a;
return (_a = this.events) == null ? void 0 : _a.dispatchEvent(new Event(event));
});
__publicField(this, "next", (result) => calc(result || this.result));
__publicField(this, "warmup", (userConfig) => warmup(this, userConfig));
get();
this.env = env;
config.wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tf24.version_core}/dist/`;
@ -12250,7 +12253,7 @@ var Human = class {
similarity(embedding1, embedding2) {
return similarity(embedding1, embedding2);
}
segmentation(input, background) {
async segmentation(input, background) {
return input ? process5(input, background, this.config) : null;
}
enhance(input) {
@ -12292,6 +12295,12 @@ var Human = class {
if (current > (this.performance.load || 0))
this.performance.load = current;
}
next(result = this.result) {
return calc(result);
}
async warmup(userConfig) {
return warmup(this, userConfig);
}
async detect(input, userConfig) {
return new Promise(async (resolve) => {
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;

View File

@ -4491,10 +4491,16 @@ function process2(input, config3) {
else
throw new Error(`Human: Input tensor shape must be [1, height, width, 3] and instead was ${input.shape}`);
} else {
if (typeof input["readyState"] !== "undefined" && input["readyState"] <= 2) {
log("input stream is not ready");
return { tensor: null, canvas: inCanvas };
}
const originalWidth = input["naturalWidth"] || input["videoWidth"] || input["width"] || input["shape"] && input["shape"][1] > 0;
const originalHeight = input["naturalHeight"] || input["videoHeight"] || input["height"] || input["shape"] && input["shape"][2] > 0;
if (!originalWidth || !originalHeight)
if (!originalWidth || !originalHeight) {
log("cannot determine input dimensions");
return { tensor: null, canvas: inCanvas };
}
let targetWidth = originalWidth;
let targetHeight = originalHeight;
if (targetWidth > maxSize) {
@ -10996,18 +11002,17 @@ async function person(inCanvas2, result, drawOptions) {
}
}
}
async function canvas2(inCanvas2, outCanvas2) {
if (!inCanvas2 || !outCanvas2)
async function canvas2(input, output) {
if (!input || !output)
return;
getCanvasContext(outCanvas2);
const ctx = getCanvasContext(inCanvas2);
ctx.drawImage(inCanvas2, 0, 0);
const ctx = getCanvasContext(output);
ctx.drawImage(input, 0, 0);
}
async function all(inCanvas2, result, drawOptions) {
if (!result || !result.performance || !result || !inCanvas2)
return null;
const timestamp = now();
const localOptions = mergeDeep(options2, drawOptions);
if (!result || !inCanvas2)
return null;
const promise = Promise.all([
face2(inCanvas2, result.face, localOptions),
body2(inCanvas2, result.body, localOptions),
@ -12207,8 +12212,6 @@ var Human = class {
var _a;
return (_a = this.events) == null ? void 0 : _a.dispatchEvent(new Event(event));
});
__publicField(this, "next", (result) => calc(result || this.result));
__publicField(this, "warmup", (userConfig) => warmup(this, userConfig));
get();
this.env = env;
config.wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tf24.version_core}/dist/`;
@ -12251,7 +12254,7 @@ var Human = class {
similarity(embedding1, embedding2) {
return similarity(embedding1, embedding2);
}
segmentation(input, background) {
async segmentation(input, background) {
return input ? process5(input, background, this.config) : null;
}
enhance(input) {
@ -12293,6 +12296,12 @@ var Human = class {
if (current > (this.performance.load || 0))
this.performance.load = current;
}
next(result = this.result) {
return calc(result);
}
async warmup(userConfig) {
return warmup(this, userConfig);
}
async detect(input, userConfig) {
return new Promise(async (resolve) => {
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;

31
dist/human.node.js vendored
View File

@ -4490,10 +4490,16 @@ function process2(input, config3) {
else
throw new Error(`Human: Input tensor shape must be [1, height, width, 3] and instead was ${input.shape}`);
} else {
if (typeof input["readyState"] !== "undefined" && input["readyState"] <= 2) {
log("input stream is not ready");
return { tensor: null, canvas: inCanvas };
}
const originalWidth = input["naturalWidth"] || input["videoWidth"] || input["width"] || input["shape"] && input["shape"][1] > 0;
const originalHeight = input["naturalHeight"] || input["videoHeight"] || input["height"] || input["shape"] && input["shape"][2] > 0;
if (!originalWidth || !originalHeight)
if (!originalWidth || !originalHeight) {
log("cannot determine input dimensions");
return { tensor: null, canvas: inCanvas };
}
let targetWidth = originalWidth;
let targetHeight = originalHeight;
if (targetWidth > maxSize) {
@ -10995,18 +11001,17 @@ async function person(inCanvas2, result, drawOptions) {
}
}
}
async function canvas2(inCanvas2, outCanvas2) {
if (!inCanvas2 || !outCanvas2)
async function canvas2(input, output) {
if (!input || !output)
return;
getCanvasContext(outCanvas2);
const ctx = getCanvasContext(inCanvas2);
ctx.drawImage(inCanvas2, 0, 0);
const ctx = getCanvasContext(output);
ctx.drawImage(input, 0, 0);
}
async function all(inCanvas2, result, drawOptions) {
if (!result || !result.performance || !result || !inCanvas2)
return null;
const timestamp = now();
const localOptions = mergeDeep(options2, drawOptions);
if (!result || !inCanvas2)
return null;
const promise = Promise.all([
face2(inCanvas2, result.face, localOptions),
body2(inCanvas2, result.body, localOptions),
@ -12206,8 +12211,6 @@ var Human = class {
var _a;
return (_a = this.events) == null ? void 0 : _a.dispatchEvent(new Event(event));
});
__publicField(this, "next", (result) => calc(result || this.result));
__publicField(this, "warmup", (userConfig) => warmup(this, userConfig));
get();
this.env = env;
config.wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tf24.version_core}/dist/`;
@ -12250,7 +12253,7 @@ var Human = class {
similarity(embedding1, embedding2) {
return similarity(embedding1, embedding2);
}
segmentation(input, background) {
async segmentation(input, background) {
return input ? process5(input, background, this.config) : null;
}
enhance(input) {
@ -12292,6 +12295,12 @@ var Human = class {
if (current > (this.performance.load || 0))
this.performance.load = current;
}
next(result = this.result) {
return calc(result);
}
async warmup(userConfig) {
return warmup(this, userConfig);
}
async detect(input, userConfig) {
return new Promise(async (resolve) => {
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;

View File

@ -492,17 +492,16 @@ export async function person(inCanvas: HTMLCanvasElement, result: Array<PersonRe
}
}
export async function canvas(inCanvas: HTMLCanvasElement, outCanvas: HTMLCanvasElement) {
if (!inCanvas || !outCanvas) return;
getCanvasContext(outCanvas);
const ctx = getCanvasContext(inCanvas);
ctx.drawImage(inCanvas, 0, 0);
export async function canvas(input: HTMLCanvasElement | HTMLImageElement | HTMLMediaElement | HTMLVideoElement, output: HTMLCanvasElement) {
if (!input || !output) return;
const ctx = getCanvasContext(output);
ctx.drawImage(input, 0, 0);
}
export async function all(inCanvas: HTMLCanvasElement, result: Result, drawOptions?: DrawOptions) {
if (!result || !result.performance || !result || !inCanvas) return null;
const timestamp = now();
const localOptions = mergeDeep(options, drawOptions);
if (!result || !inCanvas) return null;
const promise = Promise.all([
face(inCanvas, result.face, localOptions),

View File

@ -150,7 +150,6 @@ export class Human {
#analyzeMemoryLeaks: boolean;
#checkSanity: boolean;
initial: boolean;
// definition end
/**
@ -256,7 +255,7 @@ export class Human {
* @param background?: {@link Input}
* @returns Canvas
*/
segmentation(input: Input, background?: Input) {
async segmentation(input: Input, background?: Input) {
return input ? segmentation.process(input, background, this.config) : null;
}
@ -327,14 +326,19 @@ export class Human {
* @param result?: {@link Result} optional use specific result set to run interpolation on
* @returns result: {@link Result}
*/
next = (result?: Result) => interpolate.calc(result || this.result) as Result;
next(result: Result = this.result) {
return interpolate.calc(result) as Result;
}
/** Warmup method pre-initializes all configured models for faster inference
* - can take significant time on startup
* - only used for `webgl` and `humangl` backends
* @param userConfig?: {@link Config}
* @returns result: {@link Result}
*/
warmup = (userConfig?: Partial<Config>) => warmups.warmup(this, userConfig) as Promise<Result | { error }>
async warmup(userConfig?: Partial<Config>): Promise<Result | { error }> {
return warmups.warmup(this, userConfig) as Promise<Result | { error }>;
}
/** Main detection method
* - Analyze configuration: {@link Config}

View File

@ -7,6 +7,7 @@ import * as fxImage from './imagefx';
import type { Tensor } from '../tfjs/types';
import type { Config } from '../config';
import { env } from '../env';
import { log } from '../helpers';
type Input = Tensor | ImageData | ImageBitmap | HTMLImageElement | HTMLMediaElement | HTMLVideoElement | HTMLCanvasElement | OffscreenCanvas | typeof Image | typeof env.Canvas;
@ -62,9 +63,16 @@ export function process(input: Input, config: Config): { tensor: Tensor | null,
else throw new Error(`Human: Input tensor shape must be [1, height, width, 3] and instead was ${(input as unknown as Tensor).shape}`);
} else {
// check if resizing will be needed
if (typeof input['readyState'] !== 'undefined' && input['readyState'] <= 2) {
log('input stream is not ready');
return { tensor: null, canvas: inCanvas }; // video may become temporarily unavailable due to onresize
}
const originalWidth = input['naturalWidth'] || input['videoWidth'] || input['width'] || (input['shape'] && (input['shape'][1] > 0));
const originalHeight = input['naturalHeight'] || input['videoHeight'] || input['height'] || (input['shape'] && (input['shape'][2] > 0));
if (!originalWidth || !originalHeight) return { tensor: null, canvas: inCanvas }; // video may become temporarily unavailable due to onresize
if (!originalWidth || !originalHeight) {
log('cannot determine input dimensions');
return { tensor: null, canvas: inCanvas }; // video may become temporarily unavailable due to onresize
}
let targetWidth = originalWidth;
let targetHeight = originalHeight;
if (targetWidth > maxSize) {
@ -153,7 +161,6 @@ export function process(input: Input, config: Config): { tensor: Tensor | null,
outCanvas = inCanvas;
if (fx) fx = null;
}
// create tensor from image if tensor is not already defined
if (!tensor) {
let pixels;

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

View File

@ -48,6 +48,6 @@ export declare function body(inCanvas: HTMLCanvasElement, result: Array<BodyResu
export declare function hand(inCanvas: HTMLCanvasElement, result: Array<HandResult>, drawOptions?: DrawOptions): Promise<void>;
export declare function object(inCanvas: HTMLCanvasElement, result: Array<ObjectResult>, drawOptions?: DrawOptions): Promise<void>;
export declare function person(inCanvas: HTMLCanvasElement, result: Array<PersonResult>, drawOptions?: DrawOptions): Promise<void>;
export declare function canvas(inCanvas: HTMLCanvasElement, outCanvas: HTMLCanvasElement): Promise<void>;
export declare function canvas(input: HTMLCanvasElement | HTMLImageElement | HTMLMediaElement | HTMLVideoElement, output: HTMLCanvasElement): Promise<void>;
export declare function all(inCanvas: HTMLCanvasElement, result: Result, drawOptions?: DrawOptions): Promise<[void, void, void, void, void] | null>;
//# sourceMappingURL=draw.d.ts.map

View File

@ -1 +1 @@
{"version":3,"file":"draw.d.ts","sourceRoot":"","sources":["../../../src/draw/draw.ts"],"names":[],"mappings":"AAAA;;GAEG;AAIH,OAAO,KAAK,EAAE,MAAM,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,YAAY,EAAE,aAAa,EAAE,YAAY,EAAE,MAAM,WAAW,CAAC;AAEvH;;;;;;;;;;;;;;;;;;;GAmBG;AACH,MAAM,WAAW,WAAW;IAC1B,KAAK,EAAE,MAAM,CAAC;IACd,UAAU,EAAE,MAAM,CAAC;IACnB,WAAW,EAAE,MAAM,CAAC;IACpB,IAAI,EAAE,MAAM,CAAC;IACb,UAAU,EAAE,MAAM,CAAC;IACnB,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,UAAU,EAAE,OAAO,CAAC;IACpB,UAAU,EAAE,OAAO,CAAC;IACpB,SAAS,EAAE,OAAO,CAAC;IACnB,YAAY,EAAE,OAAO,CAAC;IACtB,QAAQ,EAAE,OAAO,CAAC;IAClB,YAAY,EAAE,OAAO,CAAC;IACtB,QAAQ,EAAE,OAAO,CAAC;IAClB,SAAS,EAAE,OAAO,CAAC;IACnB,cAAc,EAAE,OAAO,CAAC;CACzB;AAED,eAAO,MAAM,OAAO,EAAE,WAkBrB,CAAC;AA2EF,wBAAsB,OAAO,CAAC,QAAQ,EAAE,iBAAiB,EAAE,MAAM,EAAE,KAAK,CAAC,aAAa,CAAC,EAAE,WAAW,CAAC,EAAE,WAAW,iBAuBjH;AAED,wBAAsB,IAAI,CAAC,QAAQ,EAAE,iBAAiB,EAAE,MAAM,EAAE,KAAK,CAAC,UAAU,CAAC,EAAE,WAAW,CAAC,EAAE,WAAW,iBAoG3G;AAED,wBAAsB,IAAI,CAAC,QAAQ,EAAE,iBAAiB,EAAE,MAAM,EAAE,KAAK,CAAC,UAAU,CAAC,EAAE,WAAW,CAAC,EAAE,WAAW,iBA4G3G;AAED,wBAAsB,IAAI,CAAC,QAAQ,EAAE,iBAAiB,EAAE,MAAM,EAAE,KAAK,CAAC,UAAU,CAAC,EAAE,WAAW,CAAC,EAAE,WAAW,iBA+D3G;AAED,wBAAsB,MAAM,CAAC,QAAQ,EAAE,iBAAiB,EAAE,MAAM,EAAE,KAAK,CAAC,YAAY,CAAC,EAAE,WAAW,CAAC,EAAE,WAAW,iBAuB/G;AAED,wBAAsB,MAAM,CAAC,QAAQ,EAAE,iBAAiB,EAAE,MAAM,EAAE,KAAK,CAAC,YAAY,CAAC,EAAE,WAAW,CAAC,EAAE,WAAW,iBAwB/G;AAED,wBAAsB,MAAM,CAAC,QAAQ,EAAE,iBAAiB,EAAE,SAAS,EAAE,iBAAiB,iBAKrF;AAED,wBAAsB,GAAG,CAAC,QAAQ,EAAE,iBAAiB,EAAE,MAAM,EAAE,MAAM,EAAE,WAAW,CAAC,EAAE,WAAW,kDA4B/F"}
{"version":3,"file":"draw.d.ts","sourceRoot":"","sources":["../../../src/draw/draw.ts"],"names":[],"mappings":"AAAA;;GAEG;AAIH,OAAO,KAAK,EAAE,MAAM,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,YAAY,EAAE,aAAa,EAAE,YAAY,EAAE,MAAM,WAAW,CAAC;AAEvH;;;;;;;;;;;;;;;;;;;GAmBG;AACH,MAAM,WAAW,WAAW;IAC1B,KAAK,EAAE,MAAM,CAAC;IACd,UAAU,EAAE,MAAM,CAAC;IACnB,WAAW,EAAE,MAAM,CAAC;IACpB,IAAI,EAAE,MAAM,CAAC;IACb,UAAU,EAAE,MAAM,CAAC;IACnB,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,UAAU,EAAE,OAAO,CAAC;IACpB,UAAU,EAAE,OAAO,CAAC;IACpB,SAAS,EAAE,OAAO,CAAC;IACnB,YAAY,EAAE,OAAO,CAAC;IACtB,QAAQ,EAAE,OAAO,CAAC;IAClB,YAAY,EAAE,OAAO,CAAC;IACtB,QAAQ,EAAE,OAAO,CAAC;IAClB,SAAS,EAAE,OAAO,CAAC;IACnB,cAAc,EAAE,OAAO,CAAC;CACzB;AAED,eAAO,MAAM,OAAO,EAAE,WAkBrB,CAAC;AA2EF,wBAAsB,OAAO,CAAC,QAAQ,EAAE,iBAAiB,EAAE,MAAM,EAAE,KAAK,CAAC,aAAa,CAAC,EAAE,WAAW,CAAC,EAAE,WAAW,iBAuBjH;AAED,wBAAsB,IAAI,CAAC,QAAQ,EAAE,iBAAiB,EAAE,MAAM,EAAE,KAAK,CAAC,UAAU,CAAC,EAAE,WAAW,CAAC,EAAE,WAAW,iBAoG3G;AAED,wBAAsB,IAAI,CAAC,QAAQ,EAAE,iBAAiB,EAAE,MAAM,EAAE,KAAK,CAAC,UAAU,CAAC,EAAE,WAAW,CAAC,EAAE,WAAW,iBA4G3G;AAED,wBAAsB,IAAI,CAAC,QAAQ,EAAE,iBAAiB,EAAE,MAAM,EAAE,KAAK,CAAC,UAAU,CAAC,EAAE,WAAW,CAAC,EAAE,WAAW,iBA+D3G;AAED,wBAAsB,MAAM,CAAC,QAAQ,EAAE,iBAAiB,EAAE,MAAM,EAAE,KAAK,CAAC,YAAY,CAAC,EAAE,WAAW,CAAC,EAAE,WAAW,iBAuB/G;AAED,wBAAsB,MAAM,CAAC,QAAQ,EAAE,iBAAiB,EAAE,MAAM,EAAE,KAAK,CAAC,YAAY,CAAC,EAAE,WAAW,CAAC,EAAE,WAAW,iBAwB/G;AAED,wBAAsB,MAAM,CAAC,KAAK,EAAE,iBAAiB,GAAG,gBAAgB,GAAG,gBAAgB,GAAG,gBAAgB,EAAE,MAAM,EAAE,iBAAiB,iBAIxI;AAED,wBAAsB,GAAG,CAAC,QAAQ,EAAE,iBAAiB,EAAE,MAAM,EAAE,MAAM,EAAE,WAAW,CAAC,EAAE,WAAW,kDA4B/F"}

10
types/src/human.d.ts vendored
View File

@ -139,6 +139,9 @@ export declare class Human {
* @returns { tensor, canvas }
*/
image: (input: Input) => {
/** Defines all possible input types for **Human** detection
* @typedef Input Type
*/
tensor: Tensor<import("@tensorflow/tfjs-core").Rank> | null;
canvas: OffscreenCanvas | HTMLCanvasElement;
};
@ -159,7 +162,7 @@ export declare class Human {
* @param background?: {@link Input}
* @returns Canvas
*/
segmentation(input: Input, background?: Input): Promise<OffscreenCanvas | HTMLCanvasElement | null> | null;
segmentation(input: Input, background?: Input): Promise<OffscreenCanvas | HTMLCanvasElement | null>;
/** Enhance method performs additional enhacements to face image previously detected for futher this.processing
* @param input: Tensor as provided in human.result.face[n].tensor
* @returns Tensor
@ -195,13 +198,14 @@ export declare class Human {
* @param result?: {@link Result} optional use specific result set to run interpolation on
* @returns result: {@link Result}
*/
next: (result?: Result | undefined) => Result;
next(result?: Result): Result;
/** Warmup method pre-initializes all configured models for faster inference
* - can take significant time on startup
* - only used for `webgl` and `humangl` backends
* @param userConfig?: {@link Config}
* @returns result: {@link Result}
*/
warmup: (userConfig?: Partial<Config> | undefined) => Promise<Result | {
warmup(userConfig?: Partial<Config>): Promise<Result | {
error: any;
}>;
/** Main detection method

View File

@ -1 +1 @@
{"version":3,"file":"human.d.ts","sourceRoot":"","sources":["../../src/human.ts"],"names":[],"mappings":"AAAA;;GAEG;AAGH,OAAO,EAAE,MAAM,EAAY,MAAM,UAAU,CAAC;AAC5C,OAAO,KAAK,EAAE,MAAM,EAAmE,MAAM,UAAU,CAAC;AACxG,OAAO,KAAK,EAAE,MAAM,qBAAqB,CAAC;AAG1C,OAAO,KAAK,QAAQ,MAAM,sBAAsB,CAAC;AAYjD,OAAO,KAAK,IAAI,MAAM,aAAa,CAAC;AAGpC,OAAO,KAAK,GAAG,MAAM,OAAO,CAAC;AAI7B,OAAO,KAAK,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAGvD,cAAc,UAAU,CAAC;AACzB,cAAc,UAAU,CAAC;AACzB,YAAY,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAC/C,OAAO,EAAE,GAAG,EAAE,MAAM,OAAO,CAAC;AAE5B;;GAEG;AACH,oBAAY,KAAK,GAAG,MAAM,GAAG,SAAS,GAAG,WAAW,GAAG,gBAAgB,GAAG,gBAAgB,GAAG,gBAAgB,GAAG,iBAAiB,GAAG,eAAe,GAAG,OAAO,KAAK,GAAG,OAAO,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC;AAE3L;;;;;;GAMG;AACH,oBAAY,MAAM,GAAG,QAAQ,GAAG,MAAM,GAAG,OAAO,GAAG,QAAQ,GAAG,QAAQ,CAAC;AAEvE;;GAEG;AACH,oBAAY,KAAK,GAAG;IAAE,KAAK,EAAE,MAAM,CAAA;CAAE,CAAC;AAEtC;;GAEG;AACH,oBAAY,UAAU,GAAG,OAAO,EAAE,CAAC;AAEnC;;;;;;;;;;GAUG;AACH,qBAAa,KAAK;;IAChB,0DAA0D;IAC1D,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;;OAGG;IACH,KAAK,EAAE,MAAM,CAAC;IACd,iDAAiD;IACjD,OAAO,EAAE;QAAE,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC;QAAC,MAAM,EAAE,eAAe,GAAG,iBAAiB,GAAG,IAAI,CAAA;KAAE,CAAC;IACvF;;OAEG;IACH,EAAE,EAAE,UAAU,CAAC;IACf;;OAEG;IACH,GAAG,EAAE,GAAG,CAAC,GAAG,CAAC;IACb;;;;;;;OAOG;IACH,IAAI,EAAE;QACJ,OAAO,EAAE,IAAI,CAAC,WAAW,CAAC;QAC1B,OAAO,EAAE,OAAO,IAAI,CAAC,OAAO,CAAC;QAC7B,IAAI,EAAE,OAAO,IAAI,CAAC,IAAI,CAAC;QACvB,IAAI,EAAE,OAAO,IAAI,CAAC,IAAI,CAAC;QACvB,IAAI,EAAE,OAAO,IAAI,CAAC,IAAI,CAAC;QACvB,MAAM,EAAE,OAAO,IAAI,CAAC,MAAM,CAAC;QAC3B,GAAG,EAAE,OAAO,IAAI,CAAC,GAAG,CAAC;KACtB,CAAC;IACF,yCAAyC;IACzC,MAAM,EAAE;QACN,IAAI,EAAE,CAAC,OAAO,EAAE,UAAU,GAAG,IAAI,EAAE,UAAU,GAAG,IAAI,CAAC,GAAG,IAAI,CAAC;QAC7D,OAAO,EAAE,UAAU,GAAG,IAAI,CAAC;QAC3B,SAAS,EAAE,UAAU,GAAG,IAAI,CAAC;QAC7B,aAAa,EAAE,UAAU,GAAG,IAAI,CAAC;QACjC,OAAO,EAAE,UAAU,GAAG,IAAI,CAAC;QAC3B,QAAQ,EAAE,CAAC,UAAU,GAAG,IAAI,EAAE,UAAU,GAAG,IAAI,CAAC,GAAG,IAAI,CAAC;QACxD,GAAG,EAAE,UAAU,GAAG,IAAI,CAAC;QACvB,MAAM,EAA
E,UAAU,GAAG,IAAI,CAAC;QAC1B,OAAO,EAAE,UAAU,GAAG,IAAI,CAAC;QAC3B,SAAS,EAAE,UAAU,GAAG,IAAI,CAAC;QAC7B,OAAO,EAAE,UAAU,GAAG,IAAI,CAAC;QAC3B,SAAS,EAAE,UAAU,GAAG,IAAI,CAAC;QAC7B,OAAO,EAAE,UAAU,GAAG,IAAI,CAAC;QAC3B,YAAY,EAAE,UAAU,GAAG,IAAI,CAAC;KACjC,CAAC;IACF;;;;;;;;OAQG;IACH,MAAM,EAAE,WAAW,CAAC;IACpB,oGAAoG;IACpG,iBAAiB,EAAE,OAAO,QAAQ,CAAC,aAAa,CAAC;IACjD,0EAA0E;IAC1E,SAAS,EAAE,OAAO,QAAQ,CAAC,KAAK,CAAC;IACjC,oFAAoF;IACpF,WAAW,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAIpC,OAAO,EAAE,OAAO,CAAC;IAIjB;;;OAGG;gBACS,UAAU,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC;IA+CxC,cAAc;IACd,OAAO,WAAY,MAAM,EAAE,UAO1B;IAgBD;;;;OAIG;IACH,KAAK,UAAW,KAAK;;;MAAuC;IAE5D;;;;;;MAME;IAEF,UAAU,CAAC,UAAU,EAAE,KAAK,CAAC,MAAM,CAAC,EAAE,UAAU,EAAE,KAAK,CAAC,MAAM,CAAC,GAAG,MAAM;IAIxE;;;;;;;;OAQG;IACH,YAAY,CAAC,KAAK,EAAE,KAAK,EAAE,UAAU,CAAC,EAAE,KAAK;IAI7C;;;OAGG;IAEH,OAAO,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI;IAIrC;;;;;OAKG;IAEH,KAAK,CAAC,aAAa,EAAE,KAAK,CAAC,MAAM,CAAC,EAAE,EAAE,EAAE,KAAK,CAAC;QAAE,IAAI,EAAE,MAAM,CAAC;QAAC,MAAM,EAAE,MAAM,CAAC;QAAC,SAAS,EAAE,MAAM,EAAE,CAAA;KAAE,CAAC,EAAE,SAAS,SAAI,GAAG;QAAE,IAAI,EAAE,MAAM,CAAC;QAAC,MAAM,EAAE,MAAM,CAAC;QAAC,UAAU,EAAE,MAAM,CAAC;QAAC,SAAS,EAAE,MAAM,EAAE,CAAA;KAAE;IAI/L;;;MAGE;IACI,IAAI,CAAC,UAAU,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC;IAiCvC,cAAc;IACd,IAAI,UAAW,MAAM,aAAkD;IAEvE;;;;;;OAMG;IACH,IAAI,0CAA0E;IAE9E;;;;MAIE;IACF,MAAM;;OAAoG;IAE1G;;;;;;;;;MASE;IACI,MAAM,CAAC,KAAK,EAAE,KAAK,EAAE,UAAU,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC,GAAG,OAAO,CAAC,MAAM,GAAG,KAAK,CAAC;CAwKlF;AAED;;GAEG;AACH,OAAO,EAAE,KAAK,IAAI,OAAO,EAAE,CAAC"}
{"version":3,"file":"human.d.ts","sourceRoot":"","sources":["../../src/human.ts"],"names":[],"mappings":"AAAA;;GAEG;AAGH,OAAO,EAAE,MAAM,EAAY,MAAM,UAAU,CAAC;AAC5C,OAAO,KAAK,EAAE,MAAM,EAAmE,MAAM,UAAU,CAAC;AACxG,OAAO,KAAK,EAAE,MAAM,qBAAqB,CAAC;AAG1C,OAAO,KAAK,QAAQ,MAAM,sBAAsB,CAAC;AAYjD,OAAO,KAAK,IAAI,MAAM,aAAa,CAAC;AAGpC,OAAO,KAAK,GAAG,MAAM,OAAO,CAAC;AAI7B,OAAO,KAAK,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAGvD,cAAc,UAAU,CAAC;AACzB,cAAc,UAAU,CAAC;AACzB,YAAY,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAC/C,OAAO,EAAE,GAAG,EAAE,MAAM,OAAO,CAAC;AAE5B;;GAEG;AACH,oBAAY,KAAK,GAAG,MAAM,GAAG,SAAS,GAAG,WAAW,GAAG,gBAAgB,GAAG,gBAAgB,GAAG,gBAAgB,GAAG,iBAAiB,GAAG,eAAe,GAAG,OAAO,KAAK,GAAG,OAAO,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC;AAE3L;;;;;;GAMG;AACH,oBAAY,MAAM,GAAG,QAAQ,GAAG,MAAM,GAAG,OAAO,GAAG,QAAQ,GAAG,QAAQ,CAAC;AAEvE;;GAEG;AACH,oBAAY,KAAK,GAAG;IAAE,KAAK,EAAE,MAAM,CAAA;CAAE,CAAC;AAEtC;;GAEG;AACH,oBAAY,UAAU,GAAG,OAAO,EAAE,CAAC;AAEnC;;;;;;;;;;GAUG;AACH,qBAAa,KAAK;;IAChB,0DAA0D;IAC1D,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;;OAGG;IACH,KAAK,EAAE,MAAM,CAAC;IACd,iDAAiD;IACjD,OAAO,EAAE;QAAE,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC;QAAC,MAAM,EAAE,eAAe,GAAG,iBAAiB,GAAG,IAAI,CAAA;KAAE,CAAC;IACvF;;OAEG;IACH,EAAE,EAAE,UAAU,CAAC;IACf;;OAEG;IACH,GAAG,EAAE,GAAG,CAAC,GAAG,CAAC;IACb;;;;;;;OAOG;IACH,IAAI,EAAE;QACJ,OAAO,EAAE,IAAI,CAAC,WAAW,CAAC;QAC1B,OAAO,EAAE,OAAO,IAAI,CAAC,OAAO,CAAC;QAC7B,IAAI,EAAE,OAAO,IAAI,CAAC,IAAI,CAAC;QACvB,IAAI,EAAE,OAAO,IAAI,CAAC,IAAI,CAAC;QACvB,IAAI,EAAE,OAAO,IAAI,CAAC,IAAI,CAAC;QACvB,MAAM,EAAE,OAAO,IAAI,CAAC,MAAM,CAAC;QAC3B,GAAG,EAAE,OAAO,IAAI,CAAC,GAAG,CAAC;KACtB,CAAC;IACF,yCAAyC;IACzC,MAAM,EAAE;QACN,IAAI,EAAE,CAAC,OAAO,EAAE,UAAU,GAAG,IAAI,EAAE,UAAU,GAAG,IAAI,CAAC,GAAG,IAAI,CAAC;QAC7D,OAAO,EAAE,UAAU,GAAG,IAAI,CAAC;QAC3B,SAAS,EAAE,UAAU,GAAG,IAAI,CAAC;QAC7B,aAAa,EAAE,UAAU,GAAG,IAAI,CAAC;QACjC,OAAO,EAAE,UAAU,GAAG,IAAI,CAAC;QAC3B,QAAQ,EAAE,CAAC,UAAU,GAAG,IAAI,EAAE,UAAU,GAAG,IAAI,CAAC,GAAG,IAAI,CAAC;QACxD,GAAG,EAAE,UAAU,GAAG,IAAI,CAAC;QACvB,MAAM,EAA
E,UAAU,GAAG,IAAI,CAAC;QAC1B,OAAO,EAAE,UAAU,GAAG,IAAI,CAAC;QAC3B,SAAS,EAAE,UAAU,GAAG,IAAI,CAAC;QAC7B,OAAO,EAAE,UAAU,GAAG,IAAI,CAAC;QAC3B,SAAS,EAAE,UAAU,GAAG,IAAI,CAAC;QAC7B,OAAO,EAAE,UAAU,GAAG,IAAI,CAAC;QAC3B,YAAY,EAAE,UAAU,GAAG,IAAI,CAAC;KACjC,CAAC;IACF;;;;;;;;OAQG;IACH,MAAM,EAAE,WAAW,CAAC;IACpB,oGAAoG;IACpG,iBAAiB,EAAE,OAAO,QAAQ,CAAC,aAAa,CAAC;IACjD,0EAA0E;IAC1E,SAAS,EAAE,OAAO,QAAQ,CAAC,KAAK,CAAC;IACjC,oFAAoF;IACpF,WAAW,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAIpC,OAAO,EAAE,OAAO,CAAC;IAGjB;;;OAGG;gBACS,UAAU,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC;IA+CxC,cAAc;IACd,OAAO,WAAY,MAAM,EAAE,UAO1B;IAgBD;;;;OAIG;IACH,KAAK,UAAW,KAAK;QArMvB;;WAEG;;;MAmM2D;IAE5D;;;;;;MAME;IAEF,UAAU,CAAC,UAAU,EAAE,KAAK,CAAC,MAAM,CAAC,EAAE,UAAU,EAAE,KAAK,CAAC,MAAM,CAAC,GAAG,MAAM;IAIxE;;;;;;;;OAQG;IACG,YAAY,CAAC,KAAK,EAAE,KAAK,EAAE,UAAU,CAAC,EAAE,KAAK;IAInD;;;OAGG;IAEH,OAAO,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI;IAIrC;;;;;OAKG;IAEH,KAAK,CAAC,aAAa,EAAE,KAAK,CAAC,MAAM,CAAC,EAAE,EAAE,EAAE,KAAK,CAAC;QAAE,IAAI,EAAE,MAAM,CAAC;QAAC,MAAM,EAAE,MAAM,CAAC;QAAC,SAAS,EAAE,MAAM,EAAE,CAAA;KAAE,CAAC,EAAE,SAAS,SAAI,GAAG;QAAE,IAAI,EAAE,MAAM,CAAC;QAAC,MAAM,EAAE,MAAM,CAAC;QAAC,UAAU,EAAE,MAAM,CAAC;QAAC,SAAS,EAAE,MAAM,EAAE,CAAA;KAAE;IAI/L;;;MAGE;IACI,IAAI,CAAC,UAAU,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC;IAiCvC,cAAc;IACd,IAAI,UAAW,MAAM,aAAkD;IAEvE;;;;;;OAMG;IACH,IAAI,CAAC,MAAM,GAAE,MAAoB;IAIjC;;;;;MAKE;IACI,MAAM,CAAC,UAAU,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC,GAAG,OAAO,CAAC,MAAM,GAAG;QAAE,KAAK,MAAA;KAAE,CAAC;IAIvE;;;;;;;;;MASE;IACI,MAAM,CAAC,KAAK,EAAE,KAAK,EAAE,UAAU,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC,GAAG,OAAO,CAAC,MAAM,GAAG,KAAK,CAAC;CAwKlF;AAED;;GAEG;AACH,OAAO,EAAE,KAAK,IAAI,OAAO,EAAE,CAAC"}

View File

@ -1 +1 @@
{"version":3,"file":"image.d.ts","sourceRoot":"","sources":["../../../src/image/image.ts"],"names":[],"mappings":"AAAA;;GAEG;AAIH,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AACxC,OAAO,EAAE,GAAG,EAAE,MAAM,QAAQ,CAAC;AAE7B,aAAK,KAAK,GAAG,MAAM,GAAG,SAAS,GAAG,WAAW,GAAG,gBAAgB,GAAG,gBAAgB,GAAG,gBAAgB,GAAG,iBAAiB,GAAG,eAAe,GAAG,OAAO,KAAK,GAAG,OAAO,GAAG,CAAC,MAAM,CAAC;AAShL,wBAAgB,MAAM,CAAC,KAAK,KAAA,EAAE,MAAM,KAAA,OAgBnC;AAKD,wBAAgB,OAAO,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,GAAG;IAAE,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC;IAAC,MAAM,EAAE,eAAe,GAAG,iBAAiB,CAAA;CAAE,CAoK5H;AAID,wBAAsB,IAAI,CAAC,MAAM,KAAA,EAAE,KAAK,EAAE,MAAM,oBA2B/C"}
{"version":3,"file":"image.d.ts","sourceRoot":"","sources":["../../../src/image/image.ts"],"names":[],"mappings":"AAAA;;GAEG;AAIH,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AACxC,OAAO,EAAE,GAAG,EAAE,MAAM,QAAQ,CAAC;AAG7B,aAAK,KAAK,GAAG,MAAM,GAAG,SAAS,GAAG,WAAW,GAAG,gBAAgB,GAAG,gBAAgB,GAAG,gBAAgB,GAAG,iBAAiB,GAAG,eAAe,GAAG,OAAO,KAAK,GAAG,OAAO,GAAG,CAAC,MAAM,CAAC;AAShL,wBAAgB,MAAM,CAAC,KAAK,KAAA,EAAE,MAAM,KAAA,OAgBnC;AAKD,wBAAgB,OAAO,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,GAAG;IAAE,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC;IAAC,MAAM,EAAE,eAAe,GAAG,iBAAiB,CAAA;CAAE,CA0K5H;AAID,wBAAsB,IAAI,CAAC,MAAM,KAAA,EAAE,KAAK,EAAE,MAAM,oBA2B/C"}

2
wiki

@ -1 +1 @@
Subproject commit 44b1bf12ab5dbf4cedde34da123237b1cd02627b
Subproject commit deadc5d4f7db0dca06bbe8bbb956e01287ef3920