diff --git a/.build.json b/.build.json
index 344ce9b0..927f3c8d 100644
--- a/.build.json
+++ b/.build.json
@@ -126,7 +126,7 @@
"format": "iife",
"input": "src/human.ts",
"output": "dist/human.js",
- "minify": true,
+ "minify": false,
"globalName": "Human",
"external": ["fs", "os", "buffer", "util"]
},
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8f7594d1..69888f41 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -11,11 +11,10 @@
### **HEAD -> main** 2021/11/05 mandic00@live.com
+- add histogram equalization
- implement wasm missing ops
- performance and memory optimizations
-
-### **origin/main** 2021/11/04 mandic00@live.com
-
+- fix react compatibility issues
- improve box rescaling for all modules
- improve precision using wasm backend
- refactor predict with execute
diff --git a/TODO.md b/TODO.md
index 07003f19..f1624a7c 100644
--- a/TODO.md
+++ b/TODO.md
@@ -41,17 +41,20 @@ MoveNet MultiPose model does not work with WASM backend due to missing F32 broad
### Pending release
-- Supports all modules on all backends
- via custom implementation of missing kernel ops
-- Performance and precision improvements
- **face** and **hand** modules
-- Use custom built TFJS for bundled version
- reduced bundle size and built-in support for all backends
- `nobundle` and `node` versions link to standard `@tensorflow` packages
-- Add optional input histogram equalization
- auto-level input for optimal brightness/contrast via `config.filter.equalization`
-- Fix **ReactJS** compatibility
-- Better precision using **WASM**
- Previous issues due to math low-precision in WASM implementation
-- Full **TS** type definitions for all modules and imports
-- Focus on simplified demo
+- Supports all modules on all backends
+ via custom implementation of missing kernel ops
+- New frame change detection algorithm used for cache determination
+ based on temporal input difference
+- New optional input histogram equalization
+ auto-level input for optimal brightness/contrast via `config.filter.equalization`
+- Performance and precision improvements
+ **face**, **hand** and **gestures** modules
+- Use custom built TFJS for bundled version
+ reduced bundle size and built-in support for all backends
+ `nobundle` and `node` versions link to standard `@tensorflow` packages
+- Fix **ReactJS** compatibility
+- Better precision using **WASM**
+ Previous issues due to math low-precision in WASM implementation
+- Full **TS** type definitions for all modules and imports
+- Focus on simplified demo
+
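The `config.filter.equalization` option called out above is a plain boolean on the shared filter block. A minimal usage sketch, assuming the published `@vladmandic/human` package; the model path is a placeholder, not taken from this diff:

```ts
import Human from '@vladmandic/human';

// enable the new auto-level filter for all inputs (path is illustrative)
const human = new Human({
  modelBasePath: '../models/',
  filter: { enabled: true, equalization: true },
});

async function analyze(video: HTMLVideoElement) {
  const result = await human.detect(video); // input is auto-leveled before detection
  console.log('faces:', result.face.length);
}
```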
diff --git a/demo/facematch/facematch.js b/demo/facematch/facematch.js
index 3620297d..09cdc089 100644
--- a/demo/facematch/facematch.js
+++ b/demo/facematch/facematch.js
@@ -9,25 +9,28 @@
import Human from '../../dist/human.esm.js';
const userConfig = {
- backend: 'wasm',
- async: false,
+ backend: 'humangl',
+ async: true,
warmup: 'none',
cacheSensitivity: 0,
debug: true,
modelBasePath: '../../models/',
+ deallocate: true,
+ filter: {
+ enabled: true,
+ equalization: true,
+ },
face: {
enabled: true,
detector: { rotation: true, return: true, maxDetected: 50 },
mesh: { enabled: true },
- embedding: { enabled: false },
- iris: { enabled: true },
+ iris: { enabled: false },
emotion: { enabled: true },
description: { enabled: true },
},
hand: { enabled: false },
- gesture: { enabled: true },
+ gesture: { enabled: false },
body: { enabled: false },
- filter: { enabled: true },
segmentation: { enabled: false },
};
@@ -73,9 +76,7 @@ async function SelectFaceCanvas(face) {
const squeeze = human.tf.squeeze(enhanced);
const normalize = human.tf.div(squeeze, 255);
await human.tf.browser.toPixels(normalize, c);
- human.tf.dispose(enhanced);
- human.tf.dispose(squeeze);
- human.tf.dispose(normalize);
+ human.tf.dispose([enhanced, squeeze, normalize]);
const ctx = c.getContext('2d');
ctx.font = 'small-caps 0.4rem "Lato"';
ctx.fillStyle = 'rgba(255, 255, 255, 1)';
@@ -134,7 +135,7 @@ async function SelectFaceCanvas(face) {
title('Selected Face');
}
-async function AddFaceCanvas(index, res, fileName) {
+function AddFaceCanvas(index, res, fileName) {
all[index] = res.face;
let ok = false;
for (const i in res.face) {
@@ -161,7 +162,7 @@ async function AddFaceCanvas(index, res, fileName) {
});
// if we actually got face image tensor, draw canvas with that face
if (res.face[i].tensor) {
- await human.tf.browser.toPixels(res.face[i].tensor, canvas);
+ human.tf.browser.toPixels(res.face[i].tensor, canvas);
document.getElementById('faces').appendChild(canvas);
const ctx = canvas.getContext('2d');
if (!ctx) return false;
@@ -169,7 +170,7 @@ async function AddFaceCanvas(index, res, fileName) {
ctx.fillStyle = 'rgba(255, 255, 255, 1)';
ctx.fillText(`${res.face[i].age}y ${(100 * (res.face[i].genderScore || 0)).toFixed(1)}% ${res.face[i].gender}`, 4, canvas.height - 6);
const arr = db.map((rec) => rec.embedding);
- const result = await human.match(res.face[i].embedding, arr);
+ const result = human.match(res.face[i].embedding, arr);
ctx.font = 'small-caps 1rem "Lato"';
      if (result.similarity && result.similarity > minScore) ctx.fillText(`${(100 * result.similarity).toFixed(1)}% ${db[result.index].name}`, 4, canvas.height - 30);
}
@@ -184,7 +185,7 @@ async function AddImageElement(index, image, length) {
const img = new Image(128, 128);
img.onload = () => { // must wait until image is loaded
human.detect(img, userConfig).then(async (res) => {
- const ok = await AddFaceCanvas(index, res, image); // then wait until image is analyzed
+ const ok = AddFaceCanvas(index, res, image); // then wait until image is analyzed
// log('Add image:', index + 1, image, 'faces:', res.face.length);
if (ok) document.getElementById('images').appendChild(img); // and finally we can add it
resolve(true);
@@ -199,7 +200,7 @@ async function AddImageElement(index, image, length) {
});
}
-async function createFaceMatchDB() {
+function createFaceMatchDB() {
log('Creating Faces DB...');
for (const image of all) {
for (const face of image) db.push({ name: 'unknown', source: face.fileName, embedding: face.embedding });
@@ -246,6 +247,9 @@ async function main() {
// images = ['/samples/in/solvay1927.jpg'];
// download and analyze all images
+ // const promises = [];
+ // for (let i = 0; i < images.length; i++) promises.push(AddImageElement(i, images[i], images.length));
+ // await Promise.all(promises);
for (let i = 0; i < images.length; i++) await AddImageElement(i, images[i], images.length);
// print stats
@@ -254,7 +258,7 @@ async function main() {
log(human.tf.engine().memory());
// if we didn't download db, generate it from current faces
- if (!db || db.length === 0) await createFaceMatchDB();
+ if (!db || db.length === 0) createFaceMatchDB();
title('');
log('Ready');
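Note the matching changes above: `human.match` and the canvas draw are no longer awaited, since descriptor matching is synchronous in this build. A condensed sketch of the demo's lookup flow; the record shape mirrors `createFaceMatchDB` above and the 0.5 similarity threshold is illustrative:

```ts
import Human from '@vladmandic/human';

// face database records as assembled by createFaceMatchDB above
type FaceRecord = { name: string; source: string; embedding: number[] };
const db: FaceRecord[] = [];

// look up the closest known face for a freshly computed embedding
function findBestMatch(human: Human, embedding: number[]): FaceRecord | null {
  const candidates = db.map((rec) => rec.embedding);
  const result = human.match(embedding, candidates); // synchronous, no await needed
  return result.similarity > 0.5 ? db[result.index] : null;
}
```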
diff --git a/demo/index.js b/demo/index.js
index 068054c9..bb21bff9 100644
--- a/demo/index.js
+++ b/demo/index.js
@@ -712,6 +712,7 @@ function setupMenu() {
menu.image = new Menu(document.body, '', { top, left: x[1] });
menu.image.addBool('enabled', userConfig.filter, 'enabled', (val) => userConfig.filter.enabled = val);
+ menu.image.addBool('histogram equalization', userConfig.filter, 'equalization', (val) => userConfig.filter.equalization = val);
ui.menuWidth = menu.image.addRange('image width', userConfig.filter, 'width', 0, 3840, 10, (val) => userConfig.filter.width = parseInt(val));
ui.menuHeight = menu.image.addRange('image height', userConfig.filter, 'height', 0, 2160, 10, (val) => userConfig.filter.height = parseInt(val));
  menu.image.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
diff --git a/demo/typescript/index.js b/demo/typescript/index.js
index c62960f0..062894b3 100644
--- a/demo/typescript/index.js
+++ b/demo/typescript/index.js
@@ -7,7 +7,8 @@
// demo/typescript/index.ts
import Human from "../../dist/human.esm.js";
var humanConfig = {
- modelBasePath: "../../models"
+ modelBasePath: "../../models",
+ filter: { equalization: false }
};
var human = new Human(humanConfig);
human.env["perfadd"] = false;
@@ -79,8 +80,8 @@ async function drawLoop() {
setTimeout(drawLoop, 30);
}
async function main() {
- log("human version:", human.version, "tfjs version:", human.tf.version_core);
- log("platform:", human.env.platform, "agent:", human.env.agent);
+ log("human version:", human.version, "| tfjs version:", human.tf.version_core);
+ log("platform:", human.env.platform, "| agent:", human.env.agent);
status("loading...");
await human.load();
log("backend:", human.tf.getBackend(), "| available:", human.env.backends);
diff --git a/demo/typescript/index.js.map b/demo/typescript/index.js.map
index 2780f706..02df9209 100644
--- a/demo/typescript/index.js.map
+++ b/demo/typescript/index.js.map
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["index.ts"],
-  "sourcesContent": ["/**\n * Human demo for browsers\n * @default Human Library\n * @summary \n * @author \n * @copyright \n * @license MIT\n */\n\n/// \n\nimport Human from '../../dist/human.esm.js'; // equivalent of @vladmandic/human\n\nconst humanConfig = { // user configuration for human, used to fine-tune behavior\n modelBasePath: '../../models',\n // backend: 'webgpu',\n // async: true,\n // face: { enabled: false, detector: { rotation: true }, iris: { enabled: false }, description: { enabled: false }, emotion: { enabled: false } },\n // body: { enabled: false },\n // hand: { enabled: false },\n // object: { enabled: true },\n // gesture: { enabled: true },\n};\n\nconst human = new Human(humanConfig); // create instance of human with overrides from user configuration\n\nhuman.env['perfadd'] = false; // is performance data showing instant or total values\nhuman.draw.options.font = 'small-caps 18px \"Lato\"'; // set font used to draw labels when using draw methods\nhuman.draw.options.lineHeight = 20;\n\nconst dom = { // grab instances of dom objects so we dont have to look them up later\n video: document.getElementById('video') as HTMLVideoElement,\n canvas: document.getElementById('canvas') as HTMLCanvasElement,\n log: document.getElementById('log') as HTMLPreElement,\n fps: document.getElementById('status') as HTMLPreElement,\n perf: document.getElementById('performance') as HTMLDivElement,\n};\nconst timestamp = { detect: 0, draw: 0, tensors: 0 }; // holds information used to calculate performance and possible memory leaks\nconst fps = { detect: 0, draw: 0 }; // holds calculated fps information for both detect and screen refresh\n\nconst log = (...msg) => { // helper method to output messages\n dom.log.innerText += msg.join(' ') + '\\n';\n // eslint-disable-next-line no-console\n console.log(...msg);\n};\nconst status = (msg) => dom.fps.innerText = msg; // print status element\nconst perf = (msg) => dom.perf.innerText = 'tensors:' + human.tf.memory().numTensors + ' | performance: ' + JSON.stringify(msg).replace(/\"|{|}/g, '').replace(/,/g, ' | '); // print performance element\n\nasync function webCam() { // initialize webcam\n status('starting webcam...');\n // @ts-ignore resizeMode is not yet defined in tslib\n const options: MediaStreamConstraints = { audio: false, video: { facingMode: 'user', resizeMode: 'none', width: { ideal: document.body.clientWidth } } };\n const stream: MediaStream = await navigator.mediaDevices.getUserMedia(options);\n const ready = new Promise((resolve) => { dom.video.onloadeddata = () => resolve(true); });\n dom.video.srcObject = stream;\n dom.video.play();\n await ready;\n dom.canvas.width = dom.video.videoWidth;\n dom.canvas.height = dom.video.videoHeight;\n const track: MediaStreamTrack = stream.getVideoTracks()[0];\n const capabilities: MediaTrackCapabilities | string = track.getCapabilities ? track.getCapabilities() : '';\n const settings: MediaTrackSettings | string = track.getSettings ? track.getSettings() : '';\n const constraints: MediaTrackConstraints | string = track.getConstraints ? track.getConstraints() : '';\n log('video:', dom.video.videoWidth, dom.video.videoHeight, track.label, { stream, track, settings, constraints, capabilities });\n dom.canvas.onclick = () => { // pause when clicked on screen and resume on next click\n if (dom.video.paused) dom.video.play();\n else dom.video.pause();\n };\n}\n\nasync function detectionLoop() { // main detection loop\n if (!dom.video.paused) {\n // console.log('profiling data:', await human.profile(dom.video));\n await human.detect(dom.video); // actual detection; were not capturing output in a local variable as it can also be reached via human.result\n const tensors = human.tf.memory().numTensors; // check current tensor usage for memory leaks\n if (tensors - timestamp.tensors !== 0) log('allocated tensors:', tensors - timestamp.tensors); // printed on start and each time there is a tensor leak\n timestamp.tensors = tensors;\n }\n const now = human.now();\n fps.detect = 1000 / (now - timestamp.detect);\n timestamp.detect = now;\n requestAnimationFrame(detectionLoop); // start new frame immediately\n}\n\nasync function drawLoop() { // main screen refresh loop\n if (!dom.video.paused) {\n const interpolated = await human.next(human.result); // smoothen result using last-known results\n await human.draw.canvas(dom.video, dom.canvas); // draw canvas to screen\n await human.draw.all(dom.canvas, interpolated); // draw labels, boxes, lines, etc.\n perf(interpolated.performance); // write performance data\n }\n const now = human.now();\n fps.draw = 1000 / (now - timestamp.draw);\n timestamp.draw = now;\n status(dom.video.paused ? 'paused' : `fps: ${fps.detect.toFixed(1).padStart(5, ' ')} detect | ${fps.draw.toFixed(1).padStart(5, ' ')} draw`); // write status\n // requestAnimationFrame(drawLoop); // refresh at screen refresh rate\n setTimeout(drawLoop, 30); // use to slow down refresh from max refresh rate to target of 30 fps\n}\n\nasync function main() { // main entry point\n log('human version:', human.version, 'tfjs version:', human.tf.version_core);\n log('platform:', human.env.platform, 'agent:', human.env.agent);\n status('loading...');\n await human.load(); // preload all models\n log('backend:', human.tf.getBackend(), '| available:', human.env.backends);\n log('loaded models:' + Object.values(human.models).filter((model) => model !== null).length);\n status('initializing...');\n await human.warmup(); // warmup function to initialize backend for future faster detection\n await webCam(); // start webcam\n await detectionLoop(); // start detection loop\n await drawLoop(); // start draw loop\n}\n\nwindow.onload = main;\n"],
- "mappings": ";;;;;;;AAWA;AAXA,AAaA,IAAM,cAAc;AAAA,EAClB,eAAe;AAAA;AAUjB,IAAM,QAAQ,IAAI,MAAM;AAExB,MAAM,IAAI,aAAa;AACvB,MAAM,KAAK,QAAQ,OAAO;AAC1B,MAAM,KAAK,QAAQ,aAAa;AAEhC,IAAM,MAAM;AAAA,EACV,OAAO,SAAS,eAAe;AAAA,EAC/B,QAAQ,SAAS,eAAe;AAAA,EAChC,KAAK,SAAS,eAAe;AAAA,EAC7B,KAAK,SAAS,eAAe;AAAA,EAC7B,MAAM,SAAS,eAAe;AAAA;AAEhC,IAAM,YAAY,EAAE,QAAQ,GAAG,MAAM,GAAG,SAAS;AACjD,IAAM,MAAM,EAAE,QAAQ,GAAG,MAAM;AAE/B,IAAM,MAAM,IAAI,QAAQ;AACtB,MAAI,IAAI,aAAa,IAAI,KAAK,OAAO;AAErC,UAAQ,IAAI,GAAG;AAAA;AAEjB,IAAM,SAAS,CAAC,QAAQ,IAAI,IAAI,YAAY;AAC5C,IAAM,OAAO,CAAC,QAAQ,IAAI,KAAK,YAAY,aAAa,MAAM,GAAG,SAAS,aAAa,qBAAqB,KAAK,UAAU,KAAK,QAAQ,UAAU,IAAI,QAAQ,MAAM;AAEpK,wBAAwB;AACtB,SAAO;AAEP,QAAM,UAAkC,EAAE,OAAO,OAAO,OAAO,EAAE,YAAY,QAAQ,YAAY,QAAQ,OAAO,EAAE,OAAO,SAAS,KAAK;AACvI,QAAM,SAAsB,MAAM,UAAU,aAAa,aAAa;AACtE,QAAM,QAAQ,IAAI,QAAQ,CAAC,YAAY;AAAE,QAAI,MAAM,eAAe,MAAM,QAAQ;AAAA;AAChF,MAAI,MAAM,YAAY;AACtB,MAAI,MAAM;AACV,QAAM;AACN,MAAI,OAAO,QAAQ,IAAI,MAAM;AAC7B,MAAI,OAAO,SAAS,IAAI,MAAM;AAC9B,QAAM,QAA0B,OAAO,iBAAiB;AACxD,QAAM,eAAgD,MAAM,kBAAkB,MAAM,oBAAoB;AACxG,QAAM,WAAwC,MAAM,cAAc,MAAM,gBAAgB;AACxF,QAAM,cAA8C,MAAM,iBAAiB,MAAM,mBAAmB;AACpG,MAAI,UAAU,IAAI,MAAM,YAAY,IAAI,MAAM,aAAa,MAAM,OAAO,EAAE,QAAQ,OAAO,UAAU,aAAa;AAChH,MAAI,OAAO,UAAU,MAAM;AACzB,QAAI,IAAI,MAAM;AAAQ,UAAI,MAAM;AAAA;AAC3B,UAAI,MAAM;AAAA;AAAA;AAInB,+BAA+B;AAC7B,MAAI,CAAC,IAAI,MAAM,QAAQ;AAErB,UAAM,MAAM,OAAO,IAAI;AACvB,UAAM,UAAU,MAAM,GAAG,SAAS;AAClC,QAAI,UAAU,UAAU,YAAY;AAAG,UAAI,sBAAsB,UAAU,UAAU;AACrF,cAAU,UAAU;AAAA;AAEtB,QAAM,MAAM,MAAM;AAClB,MAAI,SAAS,MAAQ,OAAM,UAAU;AACrC,YAAU,SAAS;AACnB,wBAAsB;AAAA;AAGxB,0BAA0B;AACxB,MAAI,CAAC,IAAI,MAAM,QAAQ;AACrB,UAAM,eAAe,MAAM,MAAM,KAAK,MAAM;AAC5C,UAAM,MAAM,KAAK,OAAO,IAAI,OAAO,IAAI;AACvC,UAAM,MAAM,KAAK,IAAI,IAAI,QAAQ;AACjC,SAAK,aAAa;AAAA;AAEpB,QAAM,MAAM,MAAM;AAClB,MAAI,OAAO,MAAQ,OAAM,UAAU;AACnC,YAAU,OAAO;AACjB,SAAO,IAAI,MAAM,SAAS,WAAW,QAAQ,IAAI,OAAO,QAAQ,GAAG,SAAS,GAAG,iBAAiB,IAAI,KAAK,QAAQ,GAAG,SAAS,GAAG;AAEhI,aAAW,UAAU;AAAA;AAGvB,sBAAsB;AACpB,MAAI,kBAAkB,MAAM,SAAS,iBAAiB,MAAM,GAAG;AAC/D,MAAI,aAAa,MAAM,IAAI,UAAU,UAAU,MAAM,IAAI;AACzD,SAAO;AACP,QAAM,MAAM;AACZ,MAAI,YAAY,MAAM,GAAG,cAAc,gBAAgB,MAAM,IAAI;AACjE,MAAI,mBAAmB,OAAO,OAAO,MAAM,QAAQ,OAAO,CAAC,UAAU,UAAU,MAAM;AACrF,SAAO;AACP,QAAM,MAAM;AACZ,QAAM;AACN,QAAM;AACN,QAAM;AAAA;AAGR,OAAO,SAAS;",
+  "sourcesContent": ["/**\n * Human demo for browsers\n * @default Human Library\n * @summary \n * @author \n * @copyright \n * @license MIT\n */\n\n/// \n\nimport Human from '../../dist/human.esm.js'; // equivalent of @vladmandic/human\n\nconst humanConfig = { // user configuration for human, used to fine-tune behavior\n modelBasePath: '../../models',\n filter: { equalization: false },\n // backend: 'webgpu',\n // async: true,\n // face: { enabled: false, detector: { rotation: true }, iris: { enabled: false }, description: { enabled: false }, emotion: { enabled: false } },\n // body: { enabled: false },\n // hand: { enabled: false },\n // object: { enabled: true },\n // gesture: { enabled: true },\n};\n\nconst human = new Human(humanConfig); // create instance of human with overrides from user configuration\n\nhuman.env['perfadd'] = false; // is performance data showing instant or total values\nhuman.draw.options.font = 'small-caps 18px \"Lato\"'; // set font used to draw labels when using draw methods\nhuman.draw.options.lineHeight = 20;\n\nconst dom = { // grab instances of dom objects so we dont have to look them up later\n video: document.getElementById('video') as HTMLVideoElement,\n canvas: document.getElementById('canvas') as HTMLCanvasElement,\n log: document.getElementById('log') as HTMLPreElement,\n fps: document.getElementById('status') as HTMLPreElement,\n perf: document.getElementById('performance') as HTMLDivElement,\n};\nconst timestamp = { detect: 0, draw: 0, tensors: 0 }; // holds information used to calculate performance and possible memory leaks\nconst fps = { detect: 0, draw: 0 }; // holds calculated fps information for both detect and screen refresh\n\nconst log = (...msg) => { // helper method to output messages\n dom.log.innerText += msg.join(' ') + '\\n';\n // eslint-disable-next-line no-console\n console.log(...msg);\n};\nconst status = (msg) => dom.fps.innerText = msg; // print status element\nconst perf = (msg) => dom.perf.innerText = 'tensors:' + human.tf.memory().numTensors + ' | performance: ' + JSON.stringify(msg).replace(/\"|{|}/g, '').replace(/,/g, ' | '); // print performance element\n\nasync function webCam() { // initialize webcam\n status('starting webcam...');\n // @ts-ignore resizeMode is not yet defined in tslib\n const options: MediaStreamConstraints = { audio: false, video: { facingMode: 'user', resizeMode: 'none', width: { ideal: document.body.clientWidth } } };\n const stream: MediaStream = await navigator.mediaDevices.getUserMedia(options);\n const ready = new Promise((resolve) => { dom.video.onloadeddata = () => resolve(true); });\n dom.video.srcObject = stream;\n dom.video.play();\n await ready;\n dom.canvas.width = dom.video.videoWidth;\n dom.canvas.height = dom.video.videoHeight;\n const track: MediaStreamTrack = stream.getVideoTracks()[0];\n const capabilities: MediaTrackCapabilities | string = track.getCapabilities ? track.getCapabilities() : '';\n const settings: MediaTrackSettings | string = track.getSettings ? track.getSettings() : '';\n const constraints: MediaTrackConstraints | string = track.getConstraints ? track.getConstraints() : '';\n log('video:', dom.video.videoWidth, dom.video.videoHeight, track.label, { stream, track, settings, constraints, capabilities });\n dom.canvas.onclick = () => { // pause when clicked on screen and resume on next click\n if (dom.video.paused) dom.video.play();\n else dom.video.pause();\n };\n}\n\nasync function detectionLoop() { // main detection loop\n if (!dom.video.paused) {\n // console.log('profiling data:', await human.profile(dom.video));\n await human.detect(dom.video); // actual detection; were not capturing output in a local variable as it can also be reached via human.result\n const tensors = human.tf.memory().numTensors; // check current tensor usage for memory leaks\n if (tensors - timestamp.tensors !== 0) log('allocated tensors:', tensors - timestamp.tensors); // printed on start and each time there is a tensor leak\n timestamp.tensors = tensors;\n }\n const now = human.now();\n fps.detect = 1000 / (now - timestamp.detect);\n timestamp.detect = now;\n requestAnimationFrame(detectionLoop); // start new frame immediately\n}\n\nasync function drawLoop() { // main screen refresh loop\n if (!dom.video.paused) {\n const interpolated = await human.next(human.result); // smoothen result using last-known results\n await human.draw.canvas(dom.video, dom.canvas); // draw canvas to screen\n await human.draw.all(dom.canvas, interpolated); // draw labels, boxes, lines, etc.\n perf(interpolated.performance); // write performance data\n }\n const now = human.now();\n fps.draw = 1000 / (now - timestamp.draw);\n timestamp.draw = now;\n status(dom.video.paused ? 'paused' : `fps: ${fps.detect.toFixed(1).padStart(5, ' ')} detect | ${fps.draw.toFixed(1).padStart(5, ' ')} draw`); // write status\n // requestAnimationFrame(drawLoop); // refresh at screen refresh rate\n setTimeout(drawLoop, 30); // use to slow down refresh from max refresh rate to target of 30 fps\n}\n\nasync function main() { // main entry point\n log('human version:', human.version, '| tfjs version:', human.tf.version_core);\n log('platform:', human.env.platform, '| agent:', human.env.agent);\n status('loading...');\n await human.load(); // preload all models\n log('backend:', human.tf.getBackend(), '| available:', human.env.backends);\n log('loaded models:' + Object.values(human.models).filter((model) => model !== null).length);\n status('initializing...');\n await human.warmup(); // warmup function to initialize backend for future faster detection\n await webCam(); // start webcam\n await detectionLoop(); // start detection loop\n await drawLoop(); // start draw loop\n}\n\nwindow.onload = main;\n"],
+ "mappings": ";;;;;;;AAWA;AAXA,AAaA,IAAM,cAAc;AAAA,EAClB,eAAe;AAAA,EACf,QAAQ,EAAE,cAAc;AAAA;AAU1B,IAAM,QAAQ,IAAI,MAAM;AAExB,MAAM,IAAI,aAAa;AACvB,MAAM,KAAK,QAAQ,OAAO;AAC1B,MAAM,KAAK,QAAQ,aAAa;AAEhC,IAAM,MAAM;AAAA,EACV,OAAO,SAAS,eAAe;AAAA,EAC/B,QAAQ,SAAS,eAAe;AAAA,EAChC,KAAK,SAAS,eAAe;AAAA,EAC7B,KAAK,SAAS,eAAe;AAAA,EAC7B,MAAM,SAAS,eAAe;AAAA;AAEhC,IAAM,YAAY,EAAE,QAAQ,GAAG,MAAM,GAAG,SAAS;AACjD,IAAM,MAAM,EAAE,QAAQ,GAAG,MAAM;AAE/B,IAAM,MAAM,IAAI,QAAQ;AACtB,MAAI,IAAI,aAAa,IAAI,KAAK,OAAO;AAErC,UAAQ,IAAI,GAAG;AAAA;AAEjB,IAAM,SAAS,CAAC,QAAQ,IAAI,IAAI,YAAY;AAC5C,IAAM,OAAO,CAAC,QAAQ,IAAI,KAAK,YAAY,aAAa,MAAM,GAAG,SAAS,aAAa,qBAAqB,KAAK,UAAU,KAAK,QAAQ,UAAU,IAAI,QAAQ,MAAM;AAEpK,wBAAwB;AACtB,SAAO;AAEP,QAAM,UAAkC,EAAE,OAAO,OAAO,OAAO,EAAE,YAAY,QAAQ,YAAY,QAAQ,OAAO,EAAE,OAAO,SAAS,KAAK;AACvI,QAAM,SAAsB,MAAM,UAAU,aAAa,aAAa;AACtE,QAAM,QAAQ,IAAI,QAAQ,CAAC,YAAY;AAAE,QAAI,MAAM,eAAe,MAAM,QAAQ;AAAA;AAChF,MAAI,MAAM,YAAY;AACtB,MAAI,MAAM;AACV,QAAM;AACN,MAAI,OAAO,QAAQ,IAAI,MAAM;AAC7B,MAAI,OAAO,SAAS,IAAI,MAAM;AAC9B,QAAM,QAA0B,OAAO,iBAAiB;AACxD,QAAM,eAAgD,MAAM,kBAAkB,MAAM,oBAAoB;AACxG,QAAM,WAAwC,MAAM,cAAc,MAAM,gBAAgB;AACxF,QAAM,cAA8C,MAAM,iBAAiB,MAAM,mBAAmB;AACpG,MAAI,UAAU,IAAI,MAAM,YAAY,IAAI,MAAM,aAAa,MAAM,OAAO,EAAE,QAAQ,OAAO,UAAU,aAAa;AAChH,MAAI,OAAO,UAAU,MAAM;AACzB,QAAI,IAAI,MAAM;AAAQ,UAAI,MAAM;AAAA;AAC3B,UAAI,MAAM;AAAA;AAAA;AAInB,+BAA+B;AAC7B,MAAI,CAAC,IAAI,MAAM,QAAQ;AAErB,UAAM,MAAM,OAAO,IAAI;AACvB,UAAM,UAAU,MAAM,GAAG,SAAS;AAClC,QAAI,UAAU,UAAU,YAAY;AAAG,UAAI,sBAAsB,UAAU,UAAU;AACrF,cAAU,UAAU;AAAA;AAEtB,QAAM,MAAM,MAAM;AAClB,MAAI,SAAS,MAAQ,OAAM,UAAU;AACrC,YAAU,SAAS;AACnB,wBAAsB;AAAA;AAGxB,0BAA0B;AACxB,MAAI,CAAC,IAAI,MAAM,QAAQ;AACrB,UAAM,eAAe,MAAM,MAAM,KAAK,MAAM;AAC5C,UAAM,MAAM,KAAK,OAAO,IAAI,OAAO,IAAI;AACvC,UAAM,MAAM,KAAK,IAAI,IAAI,QAAQ;AACjC,SAAK,aAAa;AAAA;AAEpB,QAAM,MAAM,MAAM;AAClB,MAAI,OAAO,MAAQ,OAAM,UAAU;AACnC,YAAU,OAAO;AACjB,SAAO,IAAI,MAAM,SAAS,WAAW,QAAQ,IAAI,OAAO,QAAQ,GAAG,SAAS,GAAG,iBAAiB,IAAI,KAAK,QAAQ,GAAG,SAAS,GAAG;AAEhI,aAAW,UAAU;AAAA;AAGvB,sBAAsB;AACpB,MAAI,kBAAkB,MAAM,SAAS,mBAAmB,MAAM,GAAG;AACjE,MAAI,aAAa,MAAM,IAAI,UAAU,YAAY,MAAM,IAAI;AAC3D,SAAO;AACP,QAAM,MAAM;AACZ,MAAI,YAAY,MAAM,GAAG,cAAc,gBAAgB,MAAM,IAAI;AACjE,MAAI,mBAAmB,OAAO,OAAO,MAAM,QAAQ,OAAO,CAAC,UAAU,UAAU,MAAM;AACrF,SAAO;AACP,QAAM,MAAM;AACZ,QAAM;AACN,QAAM;AACN,QAAM;AAAA;AAGR,OAAO,SAAS;",
"names": []
}
diff --git a/demo/typescript/index.ts b/demo/typescript/index.ts
index 44c732bc..c85d3495 100644
--- a/demo/typescript/index.ts
+++ b/demo/typescript/index.ts
@@ -13,7 +13,7 @@ import Human from '../../dist/human.esm.js'; // equivalent of @vladmandic/human
const humanConfig = { // user configuration for human, used to fine-tune behavior
modelBasePath: '../../models',
- filter: { equalization: true },
+ filter: { equalization: false },
// backend: 'webgpu',
// async: true,
// face: { enabled: false, detector: { rotation: true }, iris: { enabled: false }, description: { enabled: false }, emotion: { enabled: false } },
@@ -99,8 +99,8 @@ async function drawLoop() { // main screen refresh loop
}
async function main() { // main entry point
- log('human version:', human.version, 'tfjs version:', human.tf.version_core);
- log('platform:', human.env.platform, 'agent:', human.env.agent);
+ log('human version:', human.version, '| tfjs version:', human.tf.version_core);
+ log('platform:', human.env.platform, '| agent:', human.env.agent);
status('loading...');
await human.load(); // preload all models
log('backend:', human.tf.getBackend(), '| available:', human.env.backends);
diff --git a/dist/human.esm-nobundle.js b/dist/human.esm-nobundle.js
index 3113bcf9..74e218d6 100644
--- a/dist/human.esm-nobundle.js
+++ b/dist/human.esm-nobundle.js
@@ -117,6 +117,7 @@ var config = {
warmup: "full",
cacheSensitivity: 0.7,
skipAllowed: false,
+ deallocate: false,
filter: {
enabled: true,
equalization: false,
@@ -233,7 +234,9 @@ __export(tfjs_esm_exports, {
version: () => version9
});
__reExport(tfjs_esm_exports, dist_star);
+__reExport(tfjs_esm_exports, dist_star2);
import * as dist_star from "@tensorflow/tfjs/dist/index.js";
+import * as dist_star2 from "@tensorflow/tfjs-backend-webgl/dist/index.js";
import { Tensor } from "@tensorflow/tfjs/dist/index.js";
import { GraphModel } from "@tensorflow/tfjs-converter/dist/index";
var version = "3.11.0";
@@ -907,8 +910,8 @@ function GLImageFilter() {
this.get = function() {
return filterChain;
};
- this.apply = function(image25) {
- resize(image25.width, image25.height);
+ this.apply = function(image24) {
+ resize(image24.width, image24.height);
drawCount = 0;
if (!sourceTexture)
sourceTexture = gl.createTexture();
@@ -917,7 +920,7 @@ function GLImageFilter() {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
- gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image25);
+ gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image24);
for (let i = 0; i < filterChain.length; i++) {
lastInChain = i === filterChain.length - 1;
const f = filterChain[i];
@@ -925,24 +928,27 @@ function GLImageFilter() {
}
return fxcanvas;
};
- this.draw = function(image25) {
+ this.draw = function(image24) {
this.add("brightness", 0);
- return this.apply(image25);
+ return this.apply(image24);
};
}
// src/image/enhance.ts
-function histogramEqualization(input) {
- const channels = tfjs_esm_exports.split(input, 3, 2);
+async function histogramEqualization(inputImage) {
+ const squeeze9 = inputImage.shape.length === 4 ? tfjs_esm_exports.squeeze(inputImage) : inputImage;
+ const channels = tfjs_esm_exports.split(squeeze9, 3, 2);
const min2 = [tfjs_esm_exports.min(channels[0]), tfjs_esm_exports.min(channels[1]), tfjs_esm_exports.min(channels[2])];
const max4 = [tfjs_esm_exports.max(channels[0]), tfjs_esm_exports.max(channels[1]), tfjs_esm_exports.max(channels[2])];
- const sub6 = [tfjs_esm_exports.sub(channels[0], min2[0]), tfjs_esm_exports.sub(channels[1], min2[1]), tfjs_esm_exports.sub(channels[2], min2[2])];
+ const absMax = await Promise.all(max4.map((channel) => channel.data()));
+ const maxValue = 0.99 * Math.max(absMax[0][0], absMax[1][0], absMax[2][0]);
+ const sub7 = [tfjs_esm_exports.sub(channels[0], min2[0]), tfjs_esm_exports.sub(channels[1], min2[1]), tfjs_esm_exports.sub(channels[2], min2[2])];
const range = [tfjs_esm_exports.sub(max4[0], min2[0]), tfjs_esm_exports.sub(max4[1], min2[1]), tfjs_esm_exports.sub(max4[2], min2[2])];
- const fact = [tfjs_esm_exports.div(255, range[0]), tfjs_esm_exports.div(255, range[1]), tfjs_esm_exports.div(255, range[2])];
- const enh = [tfjs_esm_exports.mul(sub6[0], fact[0]), tfjs_esm_exports.mul(sub6[1], fact[1]), tfjs_esm_exports.mul(sub6[2], fact[2])];
+ const fact = [tfjs_esm_exports.div(maxValue, range[0]), tfjs_esm_exports.div(maxValue, range[1]), tfjs_esm_exports.div(maxValue, range[2])];
+ const enh = [tfjs_esm_exports.mul(sub7[0], fact[0]), tfjs_esm_exports.mul(sub7[1], fact[1]), tfjs_esm_exports.mul(sub7[2], fact[2])];
const rgb2 = tfjs_esm_exports.stack([enh[0], enh[1], enh[2]], 2);
- const reshape8 = tfjs_esm_exports.reshape(rgb2, [1, input.shape[0], input.shape[1], 3]);
- tfjs_esm_exports.dispose([...channels, ...min2, ...max4, ...sub6, ...range, ...fact, ...enh, rgb2]);
+ const reshape8 = tfjs_esm_exports.reshape(rgb2, [1, squeeze9.shape[0], squeeze9.shape[1], 3]);
+ tfjs_esm_exports.dispose([...channels, ...min2, ...max4, ...sub7, ...range, ...fact, ...enh, rgb2, squeeze9]);
return reshape8;
}
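Despite its name, the reworked `histogramEqualization` is a per-channel linear levels stretch rather than true histogram equalization: the input is squeezed to 3D when needed, split into RGB channels, and each channel is shifted by its minimum and rescaled so the brightest channel lands at 99% of the observed maximum before everything is restacked into a batched 4D tensor. A standalone sketch of the same math against stock `@tensorflow/tfjs`; function and variable names are illustrative:

```ts
import * as tf from '@tensorflow/tfjs';

// per-channel stretch: out = (c - min(c)) * target / (max(c) - min(c)),
// where target = 0.99 * max over the three channel maxima
async function equalize(image: tf.Tensor3D): Promise<tf.Tensor4D> {
  const channels = tf.split(image, 3, 2); // R, G, B along the last axis
  const mins = channels.map((c) => tf.min(c));
  const maxs = channels.map((c) => tf.max(c));
  const maxData = await Promise.all(maxs.map((t) => t.data())); // one download per channel
  const target = 0.99 * Math.max(maxData[0][0], maxData[1][0], maxData[2][0]);
  const shifted = channels.map((c, i) => tf.sub(c, mins[i])); // c - min(c)
  const ranges = maxs.map((m, i) => tf.sub(m, mins[i]));      // max(c) - min(c)
  const factors = ranges.map((r) => tf.div(target, r));       // target / range
  const stretched = shifted.map((s, i) => tf.mul(s, factors[i]));
  const rgb = tf.stack(stretched, 2);
  const batched = tf.reshape(rgb, [1, image.shape[0], image.shape[1], 3]) as tf.Tensor4D;
  tf.dispose([...channels, ...mins, ...maxs, ...shifted, ...ranges, ...factors, ...stretched, rgb]);
  return batched;
}
```

The awaited `data()` reads are also why `process2` becomes `async` further down.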
@@ -952,6 +958,12 @@ var inCanvas = null;
var outCanvas = null;
var tmpCanvas = null;
var fx;
+var last = {
+ inputSum: 0,
+ cacheDiff: 1,
+ sumMethod: 0,
+ inputTensor: void 0
+};
function canvas(width, height) {
let c;
if (env.browser) {
@@ -978,7 +990,7 @@ function copy(input, output) {
ctx.drawImage(input, 0, 0);
return outputCanvas;
}
-function process2(input, config3, getTensor = true) {
+async function process2(input, config3, getTensor = true) {
if (!input) {
if (config3.debug)
log("input is missing");
@@ -1028,7 +1040,7 @@ function process2(input, config3, getTensor = true) {
targetHeight = originalHeight * ((config3.filter.width || 0) / originalWidth);
if (!targetWidth || !targetHeight)
throw new Error("input cannot determine dimension");
- if (!inCanvas || inCanvas.width !== targetWidth || inCanvas.height !== targetHeight)
+ if (!inCanvas || (inCanvas == null ? void 0 : inCanvas.width) !== targetWidth || (inCanvas == null ? void 0 : inCanvas.height) !== targetHeight)
inCanvas = canvas(targetWidth, targetHeight);
const inCtx = inCanvas.getContext("2d");
if (typeof ImageData !== "undefined" && input instanceof ImageData) {
@@ -1037,13 +1049,13 @@ function process2(input, config3, getTensor = true) {
if (config3.filter.flip && typeof inCtx.translate !== "undefined") {
inCtx.translate(originalWidth, 0);
inCtx.scale(-1, 1);
- inCtx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas.width, inCanvas.height);
+ inCtx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas == null ? void 0 : inCanvas.width, inCanvas == null ? void 0 : inCanvas.height);
inCtx.setTransform(1, 0, 0, 1, 0, 0);
} else {
- inCtx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas.width, inCanvas.height);
+ inCtx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas == null ? void 0 : inCanvas.width, inCanvas == null ? void 0 : inCanvas.height);
}
}
- if (!outCanvas || inCanvas.width !== outCanvas.width || inCanvas.height !== outCanvas.height)
+ if (!outCanvas || inCanvas.width !== outCanvas.width || (inCanvas == null ? void 0 : inCanvas.height) !== (outCanvas == null ? void 0 : outCanvas.height))
outCanvas = canvas(inCanvas.width, inCanvas.height);
if (config3.filter.enabled && env.webgl.supported) {
if (!fx)
@@ -1131,51 +1143,31 @@ function process2(input, config3, getTensor = true) {
if (!pixels)
throw new Error("cannot create tensor from input");
const casted = tfjs_esm_exports.cast(pixels, "float32");
- const tensor3 = config3.filter.equalization ? histogramEqualization(casted) : tfjs_esm_exports.expandDims(casted, 0);
+ const tensor3 = config3.filter.equalization ? await histogramEqualization(casted) : tfjs_esm_exports.expandDims(casted, 0);
tfjs_esm_exports.dispose([pixels, casted]);
return { tensor: tensor3, canvas: config3.filter.return ? outCanvas : null };
}
}
-var lastInputSum = 0;
-var lastCacheDiff = 1;
-var benchmarked = 0;
-var checksum = async (input) => {
- const resizeFact = 48;
- const reduced = tfjs_esm_exports.image.resizeBilinear(input, [Math.trunc((input.shape[1] || 1) / resizeFact), Math.trunc((input.shape[2] || 1) / resizeFact)]);
- const tfSum = async () => {
- const sumT = tfjs_esm_exports.sum(reduced);
- const sum0 = await sumT.data();
- tfjs_esm_exports.dispose(sumT);
- return sum0[0];
- };
- const jsSum = async () => {
- const reducedData = await reduced.data();
- let sum0 = 0;
- for (let i = 0; i < reducedData.length / 3; i++)
- sum0 += reducedData[3 * i + 2];
- return sum0;
- };
- if (benchmarked === 0) {
- const t0 = now();
- await jsSum();
- const t1 = now();
- await tfSum();
- const t2 = now();
- benchmarked = t1 - t0 < t2 - t1 ? 1 : 2;
- }
- const res = benchmarked === 1 ? await jsSum() : await tfSum();
- tfjs_esm_exports.dispose(reduced);
- return res;
-};
async function skip(config3, input) {
+ let skipFrame = false;
if (config3.cacheSensitivity === 0)
- return false;
- const sum2 = await checksum(input);
- const diff = 100 * (Math.max(sum2, lastInputSum) / Math.min(sum2, lastInputSum) - 1);
- lastInputSum = sum2;
- let skipFrame = diff < Math.max(config3.cacheSensitivity, lastCacheDiff);
- lastCacheDiff = diff > 10 * config3.cacheSensitivity ? 0 : diff;
- skipFrame = skipFrame && lastCacheDiff > 0;
+ return skipFrame;
+ if (!last.inputTensor) {
+ last.inputTensor = tfjs_esm_exports.clone(input);
+ } else if (last.inputTensor.shape[1] !== input.shape[1] || last.inputTensor.shape[2] !== input.shape[2]) {
+ tfjs_esm_exports.dispose(last.inputTensor);
+ last.inputTensor = tfjs_esm_exports.clone(input);
+ } else {
+ const t = {};
+ t.diff = tfjs_esm_exports.sub(input, last.inputTensor);
+ t.squared = tfjs_esm_exports.mul(t.diff, t.diff);
+ t.sum = tfjs_esm_exports.sum(t.squared);
+ const diffSum = await t.sum.data();
+ const diffRelative = diffSum[0] / (input.shape[1] || 1) / (input.shape[2] || 1) / 255 / 3;
+ tfjs_esm_exports.dispose([last.inputTensor, t.diff, t.squared, t.sum]);
+ last.inputTensor = tfjs_esm_exports.clone(input);
+ skipFrame = diffRelative <= config3.cacheSensitivity;
+ }
return skipFrame;
}
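The replaced checksum heuristic above gives way to a direct temporal difference: a clone of the previous input tensor is kept, the sum of squared pixel differences is normalized by pixel count and value range, and the frame is skipped when that relative difference stays at or below `cacheSensitivity`. A self-contained sketch of the same check; state handling is simplified here, while the bundle keeps it on the shared `last` object:

```ts
import * as tf from '@tensorflow/tfjs';

let previous: tf.Tensor | undefined; // clone of the last analyzed input frame

// true when the new frame differs so little that cached results can be reused
async function canSkip(input: tf.Tensor4D, cacheSensitivity: number): Promise<boolean> {
  if (cacheSensitivity === 0) return false; // caching disabled
  if (!previous || previous.shape[1] !== input.shape[1] || previous.shape[2] !== input.shape[2]) {
    if (previous) tf.dispose(previous);
    previous = tf.clone(input); // nothing comparable to diff against yet
    return false;
  }
  const diff = tf.sub(input, previous);
  const squared = tf.mul(diff, diff);
  const sum = tf.sum(squared);
  const [total] = await sum.data();
  tf.dispose([previous, diff, squared, sum]);
  previous = tf.clone(input);
  // same normalization as the bundle: per pixel, per channel, per 0..255 value range
  const relative = total / (input.shape[1] || 1) / (input.shape[2] || 1) / 255 / 3;
  return relative <= cacheSensitivity;
}
```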
@@ -1329,7 +1321,7 @@ async function load2(config3) {
log("cached model:", model2["modelUrl"]);
return model2;
}
-async function predict(image25, config3, idx, count2) {
+async function predict(image24, config3, idx, count2) {
var _a, _b;
if (!model2)
return null;
@@ -1341,7 +1333,7 @@ async function predict(image25, config3, idx, count2) {
}
skipped2 = 0;
return new Promise(async (resolve) => {
- const resize = tfjs_esm_exports.image.resizeBilinear(image25, [(model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[2] : 0, (model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[1] : 0], false);
+ const resize = tfjs_esm_exports.image.resizeBilinear(image24, [(model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[2] : 0, (model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[1] : 0], false);
const res = model2 == null ? void 0 : model2.execute(resize);
const num = (await res.data())[0];
cached[idx] = Math.round(100 * num) / 100;
@@ -4655,10 +4647,10 @@ var scaleBoxCoordinates = (box4, factor) => {
const endPoint = [box4.endPoint[0] * factor[0], box4.endPoint[1] * factor[1]];
return { startPoint, endPoint, landmarks: box4.landmarks, confidence: box4.confidence };
};
-var cutBoxFromImageAndResize = (box4, image25, cropSize) => {
- const h = image25.shape[1];
- const w = image25.shape[2];
- const crop2 = tfjs_esm_exports.image.cropAndResize(image25, [[box4.startPoint[1] / h, box4.startPoint[0] / w, box4.endPoint[1] / h, box4.endPoint[0] / w]], [0], cropSize);
+var cutBoxFromImageAndResize = (box4, image24, cropSize) => {
+ const h = image24.shape[1];
+ const w = image24.shape[2];
+ const crop2 = tfjs_esm_exports.image.cropAndResize(image24, [[box4.startPoint[1] / h, box4.startPoint[0] / w, box4.endPoint[1] / h, box4.endPoint[0] / w]], [0], cropSize);
const norm = tfjs_esm_exports.div(crop2, 255);
tfjs_esm_exports.dispose(crop2);
return norm;
@@ -5157,7 +5149,7 @@ var labels = [
// src/object/centernet.ts
var model4;
var inputSize3 = 0;
-var last = [];
+var last2 = [];
var lastTime3 = 0;
var skipped4 = Number.MAX_SAFE_INTEGER;
async function load4(config3) {
@@ -5224,9 +5216,9 @@ async function process3(res, outputShape, config3) {
async function predict3(input, config3) {
const skipTime = (config3.object.skipTime || 0) > now() - lastTime3;
const skipFrame = skipped4 < (config3.object.skipFrames || 0);
- if (config3.skipAllowed && skipTime && skipFrame && last.length > 0) {
+ if (config3.skipAllowed && skipTime && skipFrame && last2.length > 0) {
skipped4++;
- return last;
+ return last2;
}
skipped4 = 0;
return new Promise(async (resolve) => {
@@ -5236,7 +5228,7 @@ async function predict3(input, config3) {
lastTime3 = now();
tfjs_esm_exports.dispose(resize);
const obj = await process3(objectT, outputSize2, config3);
- last = obj;
+ last2 = obj;
resolve(obj);
});
}
@@ -5306,7 +5298,7 @@ function max2d(inputs, minScore) {
return [0, 0, newScore];
});
}
-async function predict4(image25, config3) {
+async function predict4(image24, config3) {
const skipTime = (config3.body.skipTime || 0) > now() - lastTime4;
const skipFrame = skipped5 < (config3.body.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && Object.keys(cache2.keypoints).length > 0) {
@@ -5319,7 +5311,7 @@ async function predict4(image25, config3) {
const tensor3 = tfjs_esm_exports.tidy(() => {
if (!(model5 == null ? void 0 : model5.inputs[0].shape))
return null;
- const resize = tfjs_esm_exports.image.resizeBilinear(image25, [model5.inputs[0].shape[2], model5.inputs[0].shape[1]], false);
+ const resize = tfjs_esm_exports.image.resizeBilinear(image24, [model5.inputs[0].shape[2], model5.inputs[0].shape[1]], false);
const enhance3 = tfjs_esm_exports.mul(resize, 2);
const norm = enhance3.sub(1);
return norm;
@@ -5331,10 +5323,10 @@ async function predict4(image25, config3) {
tfjs_esm_exports.dispose(tensor3);
if (resT) {
cache2.keypoints.length = 0;
- const squeeze8 = resT.squeeze();
+ const squeeze9 = resT.squeeze();
tfjs_esm_exports.dispose(resT);
- const stack4 = squeeze8.unstack(2);
- tfjs_esm_exports.dispose(squeeze8);
+ const stack4 = squeeze9.unstack(2);
+ tfjs_esm_exports.dispose(squeeze9);
for (let id = 0; id < stack4.length; id++) {
const [x2, y2, partScore] = max2d(stack4[id], config3.body.minConfidence);
if (partScore > (((_a = config3.body) == null ? void 0 : _a.minConfidence) || 0)) {
@@ -5346,8 +5338,8 @@ async function predict4(image25, config3) {
y2 / model5.inputs[0].shape[1]
],
position: [
- Math.round(image25.shape[2] * x2 / model5.inputs[0].shape[2]),
- Math.round(image25.shape[1] * y2 / model5.inputs[0].shape[1])
+ Math.round(image24.shape[2] * x2 / model5.inputs[0].shape[2]),
+ Math.round(image24.shape[1] * y2 / model5.inputs[0].shape[1])
]
});
}
@@ -5388,7 +5380,7 @@ async function predict4(image25, config3) {
// src/gear/emotion.ts
var annotations = ["angry", "disgust", "fear", "happy", "sad", "surprise", "neutral"];
var model6;
-var last2 = [];
+var last3 = [];
var lastCount2 = 0;
var lastTime5 = 0;
var skipped6 = Number.MAX_SAFE_INTEGER;
@@ -5407,15 +5399,15 @@ async function load6(config3) {
log("cached model:", model6["modelUrl"]);
return model6;
}
-async function predict5(image25, config3, idx, count2) {
+async function predict5(image24, config3, idx, count2) {
var _a, _b;
if (!model6)
return null;
const skipFrame = skipped6 < (((_a = config3.face.emotion) == null ? void 0 : _a.skipFrames) || 0);
const skipTime = (((_b = config3.face.emotion) == null ? void 0 : _b.skipTime) || 0) > now() - lastTime5;
- if (config3.skipAllowed && skipTime && skipFrame && lastCount2 === count2 && last2[idx] && last2[idx].length > 0) {
+ if (config3.skipAllowed && skipTime && skipFrame && lastCount2 === count2 && last3[idx] && last3[idx].length > 0) {
skipped6++;
- return last2[idx];
+ return last3[idx];
}
skipped6 = 0;
return new Promise(async (resolve) => {
@@ -5424,7 +5416,7 @@ async function predict5(image25, config3, idx, count2) {
if ((_a2 = config3.face.emotion) == null ? void 0 : _a2.enabled) {
const t = {};
const inputSize8 = (model6 == null ? void 0 : model6.inputs[0].shape) ? model6.inputs[0].shape[2] : 0;
- t.resize = tfjs_esm_exports.image.resizeBilinear(image25, [inputSize8, inputSize8], false);
+ t.resize = tfjs_esm_exports.image.resizeBilinear(image24, [inputSize8, inputSize8], false);
[t.red, t.green, t.blue] = tfjs_esm_exports.split(t.resize, 3, 3);
t.redNorm = tfjs_esm_exports.mul(t.red, rgb[0]);
t.greenNorm = tfjs_esm_exports.mul(t.green, rgb[1]);
@@ -5442,7 +5434,7 @@ async function predict5(image25, config3, idx, count2) {
obj.sort((a, b) => b.score - a.score);
Object.keys(t).forEach((tensor3) => tfjs_esm_exports.dispose(t[tensor3]));
}
- last2[idx] = obj;
+ last3[idx] = obj;
lastCount2 = count2;
resolve(obj);
});
@@ -5588,7 +5580,7 @@ var skipped7 = Number.MAX_SAFE_INTEGER;
var lastTime6 = 0;
var enlargeFact = 1.6;
async function predict6(input, config3) {
- var _a, _b, _c, _d, _e, _f, _g, _h;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i;
const skipTime = (((_a = config3.face.detector) == null ? void 0 : _a.skipTime) || 0) > now() - lastTime6;
const skipFrame = skipped7 < (((_b = config3.face.detector) == null ? void 0 : _b.skipFrames) || 0);
if (!config3.skipAllowed || !skipTime || !skipFrame || boxCache.length === 0) {
@@ -5632,8 +5624,13 @@ async function predict6(input, config3) {
rotationMatrix = fixedRotationMatrix;
face5.tensor = cutBoxFromImageAndResize(box4, input, ((_e = config3.face.mesh) == null ? void 0 : _e.enabled) ? [inputSize5, inputSize5] : [size(), size()]);
}
+ if ((_f = config3 == null ? void 0 : config3.filter) == null ? void 0 : _f.equalization) {
+ const equalized = await histogramEqualization(face5.tensor);
+ tfjs_esm_exports.dispose(face5.tensor);
+ face5.tensor = equalized;
+ }
face5.boxScore = Math.round(100 * box4.confidence) / 100;
- if (!((_f = config3.face.mesh) == null ? void 0 : _f.enabled)) {
+ if (!((_g = config3.face.mesh) == null ? void 0 : _g.enabled)) {
face5.box = getClampedBox(box4, input);
face5.boxRaw = getRawBox(box4, input);
face5.boxScore = Math.round(100 * box4.confidence || 0) / 100;
@@ -5655,10 +5652,10 @@ async function predict6(input, config3) {
const coordsReshaped = tfjs_esm_exports.reshape(contourCoords, [-1, 3]);
let rawCoords = await coordsReshaped.array();
tfjs_esm_exports.dispose([contourCoords, coordsReshaped, confidence, contours]);
- if (face5.faceScore < (((_g = config3.face.detector) == null ? void 0 : _g.minConfidence) || 1)) {
+ if (face5.faceScore < (((_h = config3.face.detector) == null ? void 0 : _h.minConfidence) || 1)) {
box4.confidence = face5.faceScore;
} else {
- if ((_h = config3.face.iris) == null ? void 0 : _h.enabled)
+ if ((_i = config3.face.iris) == null ? void 0 : _i.enabled)
rawCoords = await augmentIris(rawCoords, face5.tensor, config3, inputSize5);
face5.mesh = transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize5);
face5.meshRaw = face5.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize5]);
@@ -5698,7 +5695,7 @@ var uvmap = UV468;
// src/face/faceres.ts
var model9;
-var last3 = [];
+var last4 = [];
var lastTime7 = 0;
var lastCount3 = 0;
var skipped8 = Number.MAX_SAFE_INTEGER;
@@ -5726,15 +5723,15 @@ function enhance2(input) {
tfjs_esm_exports.dispose(crop2);
return norm;
}
-async function predict7(image25, config3, idx, count2) {
+async function predict7(image24, config3, idx, count2) {
var _a, _b, _c, _d;
if (!model9)
return null;
const skipFrame = skipped8 < (((_a = config3.face.description) == null ? void 0 : _a.skipFrames) || 0);
const skipTime = (((_b = config3.face.description) == null ? void 0 : _b.skipTime) || 0) > now() - lastTime7;
- if (config3.skipAllowed && skipFrame && skipTime && lastCount3 === count2 && ((_c = last3[idx]) == null ? void 0 : _c.age) && ((_d = last3[idx]) == null ? void 0 : _d.age) > 0) {
+ if (config3.skipAllowed && skipFrame && skipTime && lastCount3 === count2 && ((_c = last4[idx]) == null ? void 0 : _c.age) && ((_d = last4[idx]) == null ? void 0 : _d.age) > 0) {
skipped8++;
- return last3[idx];
+ return last4[idx];
}
skipped8 = 0;
return new Promise(async (resolve) => {
@@ -5746,7 +5743,7 @@ async function predict7(image25, config3, idx, count2) {
descriptor: []
};
if ((_a2 = config3.face.description) == null ? void 0 : _a2.enabled) {
- const enhanced = enhance2(image25);
+ const enhanced = enhance2(image24);
const resT = model9 == null ? void 0 : model9.execute(enhanced);
lastTime7 = now();
tfjs_esm_exports.dispose(enhanced);
@@ -5768,7 +5765,7 @@ async function predict7(image25, config3, idx, count2) {
obj.descriptor = Array.from(descriptor);
resT.forEach((t) => tfjs_esm_exports.dispose(t));
}
- last3[idx] = obj;
+ last4[idx] = obj;
lastCount3 = count2;
resolve(obj);
});
@@ -5787,16 +5784,16 @@ function getBoxCenter2(box4) {
box4.startPoint[1] + (box4.endPoint[1] - box4.startPoint[1]) / 2
];
}
-function cutBoxFromImageAndResize2(box4, image25, cropSize) {
- const h = image25.shape[1];
- const w = image25.shape[2];
+function cutBoxFromImageAndResize2(box4, image24, cropSize) {
+ const h = image24.shape[1];
+ const w = image24.shape[2];
const boxes = [[
box4.startPoint[1] / h,
box4.startPoint[0] / w,
box4.endPoint[1] / h,
box4.endPoint[0] / w
]];
- return tfjs_esm_exports.image.cropAndResize(image25, boxes, [0], cropSize);
+ return tfjs_esm_exports.image.cropAndResize(image24, boxes, [0], cropSize);
}
function scaleBoxCoordinates2(box4, factor) {
const startPoint = [box4.startPoint[0] * factor[0], box4.startPoint[1] * factor[1]];
@@ -8978,13 +8975,13 @@ var HandPipeline = class {
Math.trunc(coord[2])
]);
}
- async estimateHands(image25, config3) {
+ async estimateHands(image24, config3) {
let useFreshBox = false;
let boxes;
const skipTime = (config3.hand.skipTime || 0) > now() - lastTime8;
const skipFrame = this.skipped < (config3.hand.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame) {
- boxes = await this.handDetector.predict(image25, config3);
+ boxes = await this.handDetector.predict(image24, config3);
this.skipped = 0;
}
if (config3.skipAllowed)
@@ -9003,8 +9000,8 @@ var HandPipeline = class {
if (config3.hand.landmarks) {
const angle = config3.hand.rotation ? computeRotation2(currentBox.palmLandmarks[palmLandmarksPalmBase], currentBox.palmLandmarks[palmLandmarksMiddleFingerBase]) : 0;
const palmCenter = getBoxCenter2(currentBox);
- const palmCenterNormalized = [palmCenter[0] / image25.shape[2], palmCenter[1] / image25.shape[1]];
- const rotatedImage = config3.hand.rotation && env.kernels.includes("rotatewithoffset") ? tfjs_esm_exports.image.rotateWithOffset(image25, angle, 0, palmCenterNormalized) : image25.clone();
+ const palmCenterNormalized = [palmCenter[0] / image24.shape[2], palmCenter[1] / image24.shape[1]];
+ const rotatedImage = config3.hand.rotation && env.kernels.includes("rotatewithoffset") ? tfjs_esm_exports.image.rotateWithOffset(image24, angle, 0, palmCenterNormalized) : image24.clone();
const rotationMatrix = buildRotationMatrix2(-angle, palmCenter);
const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
const croppedInput = cutBoxFromImageAndResize2(newBox, rotatedImage, [this.inputSize, this.inputSize]);
@@ -9942,7 +9939,7 @@ async function load11(config3) {
inputSize7 = 256;
return model10;
}
-async function parseSinglePose(res, config3, image25, inputBox) {
+async function parseSinglePose(res, config3, image24, inputBox) {
const kpt4 = res[0][0];
const keypoints = [];
let score = 0;
@@ -9958,15 +9955,15 @@ async function parseSinglePose(res, config3, image25, inputBox) {
part: kpt3[id],
positionRaw,
position: [
- Math.round((image25.shape[2] || 0) * positionRaw[0]),
- Math.round((image25.shape[1] || 0) * positionRaw[1])
+ Math.round((image24.shape[2] || 0) * positionRaw[0]),
+ Math.round((image24.shape[1] || 0) * positionRaw[1])
]
});
}
}
score = keypoints.reduce((prev, curr) => curr.score > prev ? curr.score : prev, 0);
const bodies = [];
- const newBox = calc(keypoints.map((pt) => pt.position), [image25.shape[2], image25.shape[1]]);
+ const newBox = calc(keypoints.map((pt) => pt.position), [image24.shape[2], image24.shape[1]]);
const annotations2 = {};
for (const [name, indexes] of Object.entries(connected3)) {
const pt = [];
@@ -9983,7 +9980,7 @@ async function parseSinglePose(res, config3, image25, inputBox) {
bodies.push(body4);
return bodies;
}
-async function parseMultiPose(res, config3, image25, inputBox) {
+async function parseMultiPose(res, config3, image24, inputBox) {
const bodies = [];
for (let id = 0; id < res[0].length; id++) {
const kpt4 = res[0][id];
@@ -10001,11 +9998,11 @@ async function parseMultiPose(res, config3, image25, inputBox) {
part: kpt3[i],
score: Math.round(100 * score) / 100,
positionRaw,
- position: [Math.round((image25.shape[2] || 0) * positionRaw[0]), Math.round((image25.shape[1] || 0) * positionRaw[1])]
+ position: [Math.round((image24.shape[2] || 0) * positionRaw[0]), Math.round((image24.shape[1] || 0) * positionRaw[1])]
});
}
}
- const newBox = calc(keypoints.map((pt) => pt.position), [image25.shape[2], image25.shape[1]]);
+ const newBox = calc(keypoints.map((pt) => pt.position), [image24.shape[2], image24.shape[1]]);
const annotations2 = {};
for (const [name, indexes] of Object.entries(connected3)) {
const pt = [];
@@ -10057,7 +10054,7 @@ async function predict10(input, config3) {
// src/object/nanodet.ts
var model11;
-var last4 = [];
+var last5 = [];
var lastTime10 = 0;
var skipped11 = Number.MAX_SAFE_INTEGER;
var scaleBox = 2.5;
@@ -10137,19 +10134,19 @@ async function process4(res, inputSize8, outputShape, config3) {
results = results.filter((_val, idx) => nmsIdx.includes(idx)).sort((a, b) => b.score - a.score);
return results;
}
-async function predict11(image25, config3) {
+async function predict11(image24, config3) {
const skipTime = (config3.object.skipTime || 0) > now() - lastTime10;
const skipFrame = skipped11 < (config3.object.skipFrames || 0);
- if (config3.skipAllowed && skipTime && skipFrame && last4.length > 0) {
+ if (config3.skipAllowed && skipTime && skipFrame && last5.length > 0) {
skipped11++;
- return last4;
+ return last5;
}
skipped11 = 0;
if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense"))
- return last4;
+ return last5;
return new Promise(async (resolve) => {
- const outputSize2 = [image25.shape[2], image25.shape[1]];
- const resize = tfjs_esm_exports.image.resizeBilinear(image25, [model11.inputSize, model11.inputSize], false);
+ const outputSize2 = [image24.shape[2], image24.shape[1]];
+ const resize = tfjs_esm_exports.image.resizeBilinear(image24, [model11.inputSize, model11.inputSize], false);
const norm = tfjs_esm_exports.div(resize, 255);
const transpose = norm.transpose([0, 3, 1, 2]);
tfjs_esm_exports.dispose(norm);
@@ -10160,7 +10157,7 @@ async function predict11(image25, config3) {
lastTime10 = now();
tfjs_esm_exports.dispose(transpose);
const obj = await process4(objectT, model11.inputSize, outputSize2, config3);
- last4 = obj;
+ last5 = obj;
resolve(obj);
});
}
@@ -10526,7 +10523,7 @@ async function process5(input, background, config3) {
busy = true;
if (!model13)
await load14(config3);
- const inputImage = process2(input, config3);
+ const inputImage = await process2(input, config3);
const width = ((_a = inputImage.canvas) == null ? void 0 : _a.width) || 0;
const height = ((_b = inputImage.canvas) == null ? void 0 : _b.height) || 0;
if (!inputImage.tensor)
@@ -10577,7 +10574,7 @@ async function process5(input, background, config3) {
let mergedCanvas = null;
if (background && compositeCanvas) {
mergedCanvas = canvas(width, height);
- const bgImage = process2(background, config3);
+ const bgImage = await process2(background, config3);
tfjs_esm_exports.dispose(bgImage.tensor);
const ctxMerge = mergedCanvas.getContext("2d");
ctxMerge.drawImage(bgImage.canvas, 0, 0, mergedCanvas.width, mergedCanvas.height);
@@ -10819,6 +10816,7 @@ function registerCustomOps() {
kernelFunc: (op) => tfjs_esm_exports.tidy(() => tfjs_esm_exports.sub(op.inputs.a, tfjs_esm_exports.mul(tfjs_esm_exports.div(op.inputs.a, op.inputs.b), op.inputs.b)))
};
tfjs_esm_exports.registerKernel(kernelMod);
+ env.kernels.push("mod");
}
if (!env.kernels.includes("floormod")) {
const kernelMod = {
@@ -10827,8 +10825,8 @@ function registerCustomOps() {
kernelFunc: (op) => tfjs_esm_exports.tidy(() => tfjs_esm_exports.floorDiv(op.inputs.a / op.inputs.b) * op.inputs.b + tfjs_esm_exports.mod(op.inputs.a, op.inputs.b))
};
tfjs_esm_exports.registerKernel(kernelMod);
+ env.kernels.push("floormod");
}
- env.updateBackend();
}
async function check(instance, force = false) {
instance.state = "backend";
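Registering the polyfill kernels now also records their names in `env.kernels`, so later capability checks such as `env.kernels.includes("mod")` in the nanodet path succeed without re-enumerating the backend, while the single `env.updateBackend()` call moves into `check()` where it is properly awaited. A hedged sketch of such a polyfill using the public TFJS API; the floor-based formula here is the textbook modulo, whereas the bundled variant composes it from plain `div`:

```ts
import * as tf from '@tensorflow/tfjs';

// polyfill a missing "Mod" kernel: a mod b = a - floor(a / b) * b
function registerModKernel(backendName: string, kernels: string[]) {
  if (kernels.includes('mod')) return; // backend already provides it
  tf.registerKernel({
    kernelName: 'Mod',
    backendName,
    kernelFunc: (op) => tf.tidy(() => {
      const { a, b } = op.inputs as { a: tf.Tensor; b: tf.Tensor };
      return tf.sub(a, tf.mul(tf.floorDiv(a, b), b));
    }),
  });
  kernels.push('mod'); // record it so later capability checks pass
}
```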
@@ -10915,7 +10913,7 @@ async function check(instance, force = false) {
await tfjs_esm_exports.ready();
instance.performance.initBackend = Math.trunc(now() - timeStamp);
instance.config.backend = tfjs_esm_exports.getBackend();
- env.updateBackend();
+ await env.updateBackend();
registerCustomOps();
}
return true;
@@ -11611,8 +11609,9 @@ var body2 = (res) => {
gestures.push({ body: i, gesture: "raise right hand" });
const leftShoulder = res[i].keypoints.find((a) => a.part === "leftShoulder");
const rightShoulder = res[i].keypoints.find((a) => a.part === "rightShoulder");
- if (leftShoulder && rightShoulder)
+ if (leftShoulder && rightShoulder && Math.abs(leftShoulder.positionRaw[1] - rightShoulder.positionRaw[1]) > 0.1) {
gestures.push({ body: i, gesture: `leaning ${leftShoulder.position[1] > rightShoulder.position[1] ? "left" : "right"}` });
+ }
}
return gestures;
};
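The leaning gesture previously fired whenever both shoulders were detected; it now also requires the normalized vertical gap between the shoulders to exceed 0.1, which suppresses jitter when the shoulders are nearly level. The guard extracted as a small pure function; the keypoint type is illustrative:

```ts
type Keypoint = { part: string; position: number[]; positionRaw: number[] };

// returns 'leaning left' / 'leaning right', or null when shoulders are too level to call
function leanGesture(left: Keypoint, right: Keypoint): string | null {
  if (Math.abs(left.positionRaw[1] - right.positionRaw[1]) <= 0.1) return null; // below threshold
  return `leaning ${left.position[1] > right.position[1] ? 'left' : 'right'}`;
}
```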
@@ -12949,7 +12948,7 @@ var Human = class {
await this.load();
timeStamp = now();
this.state = "image";
- const img = process2(input, this.config);
+ const img = await process2(input, this.config);
this.process = img;
this.performance.inputProcess = this.env.perfadd ? (this.performance.inputProcess || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.analyze("Get Image:");
@@ -12969,7 +12968,7 @@ var Human = class {
this.performance.totalFrames++;
if (this.config.skipAllowed)
this.performance.cachedFrames++;
- this.performance.inputCheck = this.env.perfadd ? (this.performance.inputCheck || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
+ this.performance.cacheCheck = this.env.perfadd ? (this.performance.cacheCheck || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.analyze("Check Changed:");
let faceRes = [];
let bodyRes = [];
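Two bundle-wide consequences are visible above: `process2` (the bundled image processor) turns `async` because equalization and the cache check both await tensor reads, so every call site now awaits it, and the per-frame timing key is renamed from `performance.inputCheck` to `performance.cacheCheck`. A short sketch of reading the renamed counters, assuming a constructed `human` instance as in the demos:

```ts
const result = await human.detect(video);
console.log('cache check ms:', human.performance.cacheCheck);
console.log('cached frames:', human.performance.cachedFrames, 'of', human.performance.totalFrames);
```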
diff --git a/dist/human.esm-nobundle.js.map b/dist/human.esm-nobundle.js.map
index 51a37672..38bec479 100644
--- a/dist/human.esm-nobundle.js.map
+++ b/dist/human.esm-nobundle.js.map
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../src/util/util.ts", "../src/config.ts", "tfjs.esm.js", "../src/image/imagefxshaders.ts", "../src/image/imagefx.ts", "../src/image/enhance.ts", "../src/image/image.ts", "../src/util/env.ts", "../src/gear/gear-agegenderrace.ts", "../src/face/antispoof.ts", "../src/face/facemeshcoords.ts", "../src/face/facemeshutil.ts", "../src/face/blazeface.ts", "../src/body/blazeposecoords.ts", "../src/body/blazepose.ts", "../src/object/labels.ts", "../src/object/centernet.ts", "../src/body/efficientposecoords.ts", "../src/body/efficientpose.ts", "../src/gear/emotion.ts", "../src/face/iris.ts", "../src/face/facemesh.ts", "../src/face/faceres.ts", "../src/hand/handposeutil.ts", "../src/hand/handposeanchors.ts", "../src/hand/handposedetector.ts", "../src/hand/handposepipeline.ts", "../src/hand/fingerdef.ts", "../src/hand/fingergesture.ts", "../src/hand/fingerpose.ts", "../src/hand/handpose.ts", "../src/util/box.ts", "../src/hand/handtrack.ts", "../src/body/movenetcoords.ts", "../src/body/movenetfix.ts", "../src/body/movenet.ts", "../src/object/nanodet.ts", "../src/body/posenetutils.ts", "../src/body/posenet.ts", "../src/segmentation/segmentation.ts", "../src/models.ts", "../src/tfjs/humangl.ts", "../src/tfjs/backend.ts", "../src/util/draw.ts", "../src/face/angles.ts", "../src/face/face.ts", "../src/gesture/gesture.ts", "../src/util/interpolate.ts", "../src/face/match.ts", "../src/util/persons.ts", "../src/sample.ts", "../src/warmup.ts", "../src/human.ts"],
- "sourcesContent": ["/**\n * Simple helper functions used accross codebase\n */\n\n// helper function: join two paths\nexport function join(folder: string, file: string): string {\n const separator = folder.endsWith('/') ? '' : '/';\n const skipJoin = file.startsWith('.') || file.startsWith('/') || file.startsWith('http:') || file.startsWith('https:') || file.startsWith('file:');\n const path = skipJoin ? `${file}` : `${folder}${separator}${file}`;\n if (!path.toLocaleLowerCase().includes('.json')) throw new Error(`modelpath error: ${path} expecting json file`);\n return path;\n}\n\n// helper function: wrapper around console output\nexport function log(...msg): void {\n const dt = new Date();\n const ts = `${dt.getHours().toString().padStart(2, '0')}:${dt.getMinutes().toString().padStart(2, '0')}:${dt.getSeconds().toString().padStart(2, '0')}.${dt.getMilliseconds().toString().padStart(3, '0')}`;\n // eslint-disable-next-line no-console\n if (msg) console.log(ts, 'Human:', ...msg);\n}\n\n// helper function: gets elapsed time on both browser and nodejs\nexport const now = () => {\n if (typeof performance !== 'undefined') return performance.now();\n return parseInt((Number(process.hrtime.bigint()) / 1000 / 1000).toString());\n};\n\n// helper function: checks current config validity\nexport function validate(defaults, config, parent = 'config', msgs: Array<{ reason: string, where: string, expected?: string }> = []) {\n for (const key of Object.keys(config)) {\n if (typeof config[key] === 'object') {\n validate(defaults[key], config[key], key, msgs);\n } else {\n const defined = defaults && (typeof defaults[key] !== 'undefined');\n if (!defined) msgs.push({ reason: 'unknown property', where: `${parent}.${key} = ${config[key]}` });\n const same = defaults && typeof defaults[key] === typeof config[key];\n if (defined && !same) msgs.push({ reason: 'property type mismatch', where: `${parent}.${key} = ${config[key]}`, expected: typeof defaults[key] });\n }\n // ok = ok && defined && same;\n }\n if (config.debug && parent === 'config' && msgs.length > 0) log('invalid configuration', msgs);\n return msgs;\n}\n\n// helper function: perform deep merge of multiple objects so it allows full inheriance with overrides\nexport function mergeDeep(...objects) {\n const isObject = (obj) => obj && typeof obj === 'object';\n return objects.reduce((prev, obj) => {\n Object.keys(obj || {}).forEach((key) => {\n const pVal = prev[key];\n const oVal = obj[key];\n if (Array.isArray(pVal) && Array.isArray(oVal)) prev[key] = pVal.concat(...oVal);\n else if (isObject(pVal) && isObject(oVal)) prev[key] = mergeDeep(pVal, oVal);\n else prev[key] = oVal;\n });\n return prev;\n }, {});\n}\n\n// helper function: return min and max from input array\nexport const minmax = (data: Array) => data.reduce((acc: Array, val) => {\n acc[0] = (acc[0] === undefined || val < acc[0]) ? val : acc[0];\n acc[1] = (acc[1] === undefined || val > acc[1]) ? val : acc[1];\n return acc;\n}, []);\n\n// helper function: async wait\nexport async function wait(time) {\n const waiting = new Promise((resolve) => setTimeout(() => resolve(true), time));\n await waiting;\n}\n", "/* eslint-disable indent */\n/* eslint-disable no-multi-spaces */\n\n/** Generic config type inherited by all module types */\nexport interface GenericConfig {\n /** @property is module enabled? 
*/\n enabled: boolean,\n /** @property path to model json file */\n modelPath: string,\n /** @property how many max frames to go without re-running model if cached results are acceptable */\n skipFrames: number,\n /** @property how many max miliseconds to go without re-running model if cached results are acceptable */\n skipTime: number,\n}\n\n/** Dectector part of face configuration */\nexport interface FaceDetectorConfig extends GenericConfig {\n /** @property is face rotation correction performed after detecting face? */\n rotation: boolean,\n /** @property maximum number of detected faces */\n maxDetected: number,\n /** @property minimum confidence for a detected face before results are discarded */\n minConfidence: number,\n /** @property minimum overlap between two detected faces before one is discarded */\n iouThreshold: number,\n /** @property should face detection return face tensor to be used in some other extenrnal model? */\n return: boolean,\n}\n\n/** Mesh part of face configuration */\nexport interface FaceMeshConfig extends GenericConfig {}\n\n/** Iris part of face configuration */\nexport interface FaceIrisConfig extends GenericConfig {}\n\n/** Description or face embedding part of face configuration\n * - also used by age and gender detection\n */\nexport interface FaceDescriptionConfig extends GenericConfig {\n /** @property minimum confidence for a detected face before results are discarded */\n minConfidence: number,\n}\n\n/** Emotion part of face configuration */\nexport interface FaceEmotionConfig extends GenericConfig {\n /** @property minimum confidence for a detected face before results are discarded */\n minConfidence: number,\n}\n\n/** Anti-spoofing part of face configuration */\nexport interface FaceAntiSpoofConfig extends GenericConfig {}\n\n/** Configures all face-specific options: face detection, mesh analysis, age, gender, emotion detection and face description */\nexport interface FaceConfig extends GenericConfig {\n detector: Partial,\n mesh: Partial,\n iris: Partial,\n description: Partial,\n emotion: Partial,\n antispoof: Partial,\n}\n\n/** Configures all body detection specific options */\nexport interface BodyConfig extends GenericConfig {\n /** @property maximum numboer of detected bodies */\n maxDetected: number,\n /** @property minimum confidence for a detected body before results are discarded */\n minConfidence: number,\n detector?: {\n /** @property path to optional body detector model json file */\n modelPath: string\n },\n}\n\n/** Configures all hand detection specific options */\nexport interface HandConfig extends GenericConfig {\n /** @property should hand rotation correction be performed after hand detection? 
*/\n rotation: boolean,\n /** @property minimum confidence for a detected hand before results are discarded */\n minConfidence: number,\n /** @property minimum overlap between two detected hands before one is discarded */\n iouThreshold: number,\n /** @property maximum number of detected hands */\n maxDetected: number,\n /** @property should hand landmarks be detected or just return detected hand box */\n landmarks: boolean,\n detector: {\n /** @property path to hand detector model json */\n modelPath?: string,\n },\n skeleton: {\n /** @property path to hand skeleton model json */\n modelPath?: string,\n },\n}\n\n/** Configures all object detection specific options */\nexport interface ObjectConfig extends GenericConfig {\n /** @property minimum confidence for a detected objects before results are discarded */\n minConfidence: number,\n /** @property minimum overlap between two detected objects before one is discarded */\n iouThreshold: number,\n /** @property maximum number of detected objects */\n maxDetected: number,\n}\n\n/** Configures all body segmentation module\n * removes background from input containing person\n * if segmentation is enabled it will run as preprocessing task before any other model\n * alternatively leave it disabled and use it on-demand using human.segmentation method which can\n * remove background or replace it with user-provided background\n*/\nexport interface SegmentationConfig extends GenericConfig {\n /** @property blur segmentation output by pixels for more realistic image */\n blur: number,\n}\n\n/** Run input through image filters before inference\n * - available only in Browser environments\n * - image filters run with near-zero latency as they are executed on the GPU using WebGL\n*/\nexport interface FilterConfig {\n /** @property are image filters enabled? 
*/\n enabled: boolean,\n /** @property perform image histogram equalization */\n equalization: boolean,\n /** resize input width\n * - if both width and height are set to 0, there is no resizing\n * - if just one is set, second one is scaled automatically\n * - if both are set, values are used as-is\n * @property\n */\n width: number,\n /** resize input height\n * - if both width and height are set to 0, there is no resizing\n * - if just one is set, second one is scaled automatically\n * - if both are set, values are used as-is\n * @property\n */\n height: number,\n /** @property return processed canvas imagedata in result */\n return: boolean,\n /** @property flip input as mirror image */\n flip: boolean,\n /** @property range: -1 (darken) to 1 (lighten) */\n brightness: number,\n /** @property range: -1 (reduce contrast) to 1 (increase contrast) */\n contrast: number,\n /** @property range: 0 (no sharpening) to 1 (maximum sharpening) */\n sharpness: number,\n /** @property range: 0 (no blur) to N (blur radius in pixels) */\n blur: number\n /** @property range: -1 (reduce saturation) to 1 (increase saturation) */\n saturation: number,\n /** @property range: 0 (no change) to 360 (hue rotation in degrees) */\n hue: number,\n /** @property image negative */\n negative: boolean,\n /** @property image sepia colors */\n sepia: boolean,\n /** @property image vintage colors */\n vintage: boolean,\n /** @property image kodachrome colors */\n kodachrome: boolean,\n /** @property image technicolor colors */\n technicolor: boolean,\n /** @property image polaroid camera effect */\n polaroid: boolean,\n /** @property range: 0 (no pixelate) to N (number of pixels to pixelate) */\n pixelate: number,\n}\n\n/** Controlls gesture detection */\nexport interface GestureConfig {\n /** @property is gesture detection enabled? 
*/\n enabled: boolean,\n}\n\n/**\n * Configuration interface definition for **Human** library\n *\n * Contains all configurable parameters\n * @typedef Config\n *\n * Defaults: [config](https://github.com/vladmandic/human/blob/main/src/config.ts#L292)\n */\nexport interface Config {\n /** Backend used for TFJS operations\n * valid build-in backends are:\n * - Browser: `cpu`, `wasm`, `webgl`, `humangl`, `webgpu`\n * - NodeJS: `cpu`, `wasm`, `tensorflow`\n * default: `humangl` for browser and `tensorflow` for nodejs\n */\n backend: '' | 'cpu' | 'wasm' | 'webgl' | 'humangl' | 'tensorflow' | 'webgpu',\n\n /** Path to *.wasm files if backend is set to `wasm`\n * default: auto-detects to link to CDN `jsdelivr` when running in browser\n */\n wasmPath: string,\n\n /** Print debug statements to console\n * default: `true`\n */\n debug: boolean,\n\n /** Perform model loading and inference concurrently or sequentially\n * default: `true`\n */\n async: boolean,\n\n /** What to use for `human.warmup()`\n * - warmup pre-initializes all models for faster inference but can take significant time on startup\n * - used by `webgl`, `humangl` and `webgpu` backends\n * default: `full`\n */\n warmup: 'none' | 'face' | 'full' | 'body',\n // warmup: string;\n\n /** Base model path (typically starting with file://, http:// or https://) for all models\n * - individual modelPath values are relative to this path\n * default: `../models/` for browsers and `file://models/` for nodejs\n */\n modelBasePath: string,\n\n /** Cache sensitivity\n * - values 0..1 where 0.01 means reset cache if input changed more than 1%\n * - set to 0 to disable caching\n * default: 0.7\n */\n cacheSensitivity: number;\n\n /** Internal Variable */\n skipAllowed: boolean;\n\n /** {@link FilterConfig} */\n filter: Partial,\n\n /** {@link GestureConfig} */\n gesture: Partial;\n\n /** {@link FaceConfig} */\n face: Partial,\n\n /** {@link BodyConfig} */\n body: Partial,\n\n /** {@link HandConfig} */\n hand: Partial,\n\n /** {@link ObjectConfig} */\n object: Partial,\n\n /** {@link SegmentationConfig} */\n segmentation: Partial,\n}\n\n/** - [See all default Config values...](https://github.com/vladmandic/human/blob/main/src/config.ts#L253) */\nconst config: Config = {\n backend: '',\n modelBasePath: '',\n wasmPath: '',\n debug: true,\n async: true,\n warmup: 'full',\n cacheSensitivity: 0.70,\n skipAllowed: false,\n filter: {\n enabled: true,\n equalization: false,\n width: 0,\n height: 0,\n flip: false,\n return: true,\n brightness: 0,\n contrast: 0,\n sharpness: 0,\n blur: 0,\n saturation: 0,\n hue: 0,\n negative: false,\n sepia: false,\n vintage: false,\n kodachrome: false,\n technicolor: false,\n polaroid: false,\n pixelate: 0,\n },\n gesture: {\n enabled: true,\n },\n face: {\n enabled: true,\n detector: {\n modelPath: 'blazeface.json',\n rotation: true,\n maxDetected: 1,\n skipFrames: 99,\n skipTime: 2500,\n minConfidence: 0.2,\n iouThreshold: 0.1,\n return: false,\n },\n mesh: {\n enabled: true,\n modelPath: 'facemesh.json',\n },\n iris: {\n enabled: true,\n modelPath: 'iris.json',\n },\n emotion: {\n enabled: true,\n minConfidence: 0.1,\n skipFrames: 99,\n skipTime: 1500,\n modelPath: 'emotion.json',\n },\n description: {\n enabled: true,\n modelPath: 'faceres.json',\n skipFrames: 99,\n skipTime: 3000,\n minConfidence: 0.1,\n },\n antispoof: {\n enabled: false,\n skipFrames: 99,\n skipTime: 4000,\n modelPath: 'antispoof.json',\n },\n },\n body: {\n enabled: true,\n modelPath: 'movenet-lightning.json',\n detector: {\n modelPath: '',\n },\n 
maxDetected: -1,\n minConfidence: 0.3,\n skipFrames: 1,\n skipTime: 200,\n },\n hand: {\n enabled: true,\n rotation: true,\n skipFrames: 99,\n skipTime: 1000,\n minConfidence: 0.50,\n iouThreshold: 0.2,\n maxDetected: -1,\n landmarks: true,\n detector: {\n modelPath: 'handtrack.json',\n },\n skeleton: {\n modelPath: 'handlandmark-full.json',\n },\n },\n object: {\n enabled: false,\n modelPath: 'mb3-centernet.json',\n minConfidence: 0.2,\n iouThreshold: 0.4,\n maxDetected: 10,\n skipFrames: 99,\n skipTime: 2000,\n },\n segmentation: {\n enabled: false,\n modelPath: 'selfie.json',\n blur: 8,\n },\n};\n\nexport { config as defaults };\n", "/*\n Human\n homepage: \n author: '\n*/\n\n// tfjs/tf-browser.ts\nexport * from \"@tensorflow/tfjs/dist/index.js\";\n\n// dist/tfjs.version.js\nvar version = \"3.11.0\";\nvar version2 = \"3.11.0\";\nvar version3 = \"3.11.0\";\nvar version4 = \"3.11.0\";\nvar version5 = \"3.11.0\";\nvar version6 = \"3.11.0\";\nvar version7 = \"3.11.0\";\nvar version8 = \"3.11.0\";\nvar version9 = {\n tfjs: version,\n \"tfjs-core\": version2,\n \"tfjs-data\": version3,\n \"tfjs-layers\": version4,\n \"tfjs-converter\": version5,\n \"tfjs-backend-cpu\": version6,\n \"tfjs-backend-webgl\": version7,\n \"tfjs-backend-wasm\": version8\n};\n\n// tfjs/tf-browser.ts\nimport { Tensor } from \"@tensorflow/tfjs/dist/index.js\";\nimport { GraphModel } from \"@tensorflow/tfjs-converter/dist/index\";\nexport {\n GraphModel,\n Tensor,\n version9 as version\n};\n", "export const vertexIdentity = `\n precision highp float;\n attribute vec2 pos;\n attribute vec2 uv;\n varying vec2 vUv;\n uniform float flipY;\n void main(void) {\n vUv = uv;\n gl_Position = vec4(pos.x, pos.y*flipY, 0.0, 1.);\n }\n`;\n\nexport const fragmentIdentity = `\n precision highp float;\n varying vec2 vUv;\n uniform sampler2D texture;\n void main(void) {\n gl_FragColor = texture2D(texture, vUv);\n }\n`;\n\nexport const colorMatrixWithAlpha = `\n precision highp float;\n varying vec2 vUv;\n uniform sampler2D texture;\n uniform float m[20];\n void main(void) {\n vec4 c = texture2D(texture, vUv);\n gl_FragColor.r = m[0] * c.r + m[1] * c.g + m[2] * c.b + m[3] * c.a + m[4];\n gl_FragColor.g = m[5] * c.r + m[6] * c.g + m[7] * c.b + m[8] * c.a + m[9];\n gl_FragColor.b = m[10] * c.r + m[11] * c.g + m[12] * c.b + m[13] * c.a + m[14];\n gl_FragColor.a = m[15] * c.r + m[16] * c.g + m[17] * c.b + m[18] * c.a + m[19];\n }\n`;\n\nexport const colorMatrixWithoutAlpha = `\n precision highp float;\n varying vec2 vUv;\n uniform sampler2D texture;\n uniform float m[20];\n void main(void) {\n vec4 c = texture2D(texture, vUv);\n gl_FragColor.r = m[0] * c.r + m[1] * c.g + m[2] * c.b + m[4];\n gl_FragColor.g = m[5] * c.r + m[6] * c.g + m[7] * c.b + m[9];\n gl_FragColor.b = m[10] * c.r + m[11] * c.g + m[12] * c.b + m[14];\n gl_FragColor.a = c.a;\n }\n`;\n\nexport const pixelate = `\n precision highp float;\n varying vec2 vUv;\n uniform vec2 size;\n uniform sampler2D texture;\n vec2 pixelate(vec2 coord, vec2 size) {\n return floor( coord / size ) * size;\n }\n void main(void) {\n gl_FragColor = vec4(0.0);\n vec2 coord = pixelate(vUv, size);\n gl_FragColor += texture2D(texture, coord);\n }\n`;\n\nexport const blur = `\n precision highp float;\n varying vec2 vUv;\n uniform sampler2D texture;\n uniform vec2 px;\n void main(void) {\n gl_FragColor = vec4(0.0);\n gl_FragColor += texture2D(texture, vUv + vec2(-7.0*px.x, -7.0*px.y))*0.0044299121055113265;\n gl_FragColor += texture2D(texture, vUv + vec2(-6.0*px.x, -6.0*px.y))*0.00895781211794;\n 
gl_FragColor += texture2D(texture, vUv + vec2(-5.0*px.x, -5.0*px.y))*0.0215963866053;\n gl_FragColor += texture2D(texture, vUv + vec2(-4.0*px.x, -4.0*px.y))*0.0443683338718;\n gl_FragColor += texture2D(texture, vUv + vec2(-3.0*px.x, -3.0*px.y))*0.0776744219933;\n gl_FragColor += texture2D(texture, vUv + vec2(-2.0*px.x, -2.0*px.y))*0.115876621105;\n gl_FragColor += texture2D(texture, vUv + vec2(-1.0*px.x, -1.0*px.y))*0.147308056121;\n gl_FragColor += texture2D(texture, vUv )*0.159576912161;\n gl_FragColor += texture2D(texture, vUv + vec2( 1.0*px.x, 1.0*px.y))*0.147308056121;\n gl_FragColor += texture2D(texture, vUv + vec2( 2.0*px.x, 2.0*px.y))*0.115876621105;\n gl_FragColor += texture2D(texture, vUv + vec2( 3.0*px.x, 3.0*px.y))*0.0776744219933;\n gl_FragColor += texture2D(texture, vUv + vec2( 4.0*px.x, 4.0*px.y))*0.0443683338718;\n gl_FragColor += texture2D(texture, vUv + vec2( 5.0*px.x, 5.0*px.y))*0.0215963866053;\n gl_FragColor += texture2D(texture, vUv + vec2( 6.0*px.x, 6.0*px.y))*0.00895781211794;\n gl_FragColor += texture2D(texture, vUv + vec2( 7.0*px.x, 7.0*px.y))*0.0044299121055113265;\n }\n`;\n\nexport const convolution = `\n precision highp float;\n varying vec2 vUv;\n uniform sampler2D texture;\n uniform vec2 px;\n uniform float m[9];\n void main(void) {\n vec4 c11 = texture2D(texture, vUv - px); // top left\n vec4 c12 = texture2D(texture, vec2(vUv.x, vUv.y - px.y)); // top center\n vec4 c13 = texture2D(texture, vec2(vUv.x + px.x, vUv.y - px.y)); // top right\n vec4 c21 = texture2D(texture, vec2(vUv.x - px.x, vUv.y) ); // mid left\n vec4 c22 = texture2D(texture, vUv); // mid center\n vec4 c23 = texture2D(texture, vec2(vUv.x + px.x, vUv.y) ); // mid right\n vec4 c31 = texture2D(texture, vec2(vUv.x - px.x, vUv.y + px.y) ); // bottom left\n vec4 c32 = texture2D(texture, vec2(vUv.x, vUv.y + px.y) ); // bottom center\n vec4 c33 = texture2D(texture, vUv + px ); // bottom right\n gl_FragColor = \n c11 * m[0] + c12 * m[1] + c22 * m[2] +\n c21 * m[3] + c22 * m[4] + c23 * m[5] +\n c31 * m[6] + c32 * m[7] + c33 * m[8];\n gl_FragColor.a = c22.a;\n }\n`;\n", "/**\n * Image Filters in WebGL algoritm implementation\n * Based on: [WebGLImageFilter](https://github.com/phoboslab/WebGLImageFilter)\n */\n\nimport * as shaders from './imagefxshaders';\nimport { canvas } from './image';\n\nconst collect = (source, prefix, collection) => {\n const r = new RegExp('\\\\b' + prefix + ' \\\\w+ (\\\\w+)', 'ig');\n source.replace(r, (match, name) => {\n collection[name] = 0;\n return match;\n });\n};\n\nclass GLProgram {\n uniform = {};\n attribute = {};\n gl: WebGLRenderingContext;\n id: WebGLProgram;\n constructor(gl, vertexSource, fragmentSource) {\n this.gl = gl;\n const vertexShader = this.compile(vertexSource, this.gl.VERTEX_SHADER);\n const fragmentShader = this.compile(fragmentSource, this.gl.FRAGMENT_SHADER);\n this.id = this.gl.createProgram() as WebGLProgram;\n this.gl.attachShader(this.id, vertexShader);\n this.gl.attachShader(this.id, fragmentShader);\n this.gl.linkProgram(this.id);\n if (!this.gl.getProgramParameter(this.id, this.gl.LINK_STATUS)) throw new Error(`filter: gl link failed: ${this.gl.getProgramInfoLog(this.id)}`);\n this.gl.useProgram(this.id);\n collect(vertexSource, 'attribute', this.attribute); // Collect attributes\n for (const a in this.attribute) this.attribute[a] = this.gl.getAttribLocation(this.id, a);\n collect(vertexSource, 'uniform', this.uniform); // Collect uniforms\n collect(fragmentSource, 'uniform', this.uniform);\n for (const u in this.uniform) this.uniform[u] = 
this.gl.getUniformLocation(this.id, u);\n }\n\n compile = (source, type): WebGLShader => {\n const shader = this.gl.createShader(type) as WebGLShader;\n this.gl.shaderSource(shader, source);\n this.gl.compileShader(shader);\n if (!this.gl.getShaderParameter(shader, this.gl.COMPILE_STATUS)) throw new Error(`filter: gl compile failed: ${this.gl.getShaderInfoLog(shader)}`);\n return shader;\n };\n}\n\n// function that is instantiated as class so it has private this members\n/**\n * @class GLImageFilter\n * @property {function} reset reset current filter chain\n * @property {function} add add specified filter to filter chain\n * @property {function} apply execute filter chain and draw result\n * @property {function} draw just draw input to result\n */\n\nexport function GLImageFilter() {\n let drawCount = 0;\n let sourceTexture: WebGLTexture | null = null;\n let lastInChain = false;\n let currentFramebufferIndex = -1;\n let tempFramebuffers: [null, null] | [{ fbo: WebGLFramebuffer | null, texture: WebGLTexture | null }] = [null, null];\n let filterChain: Record[] = [];\n let vertexBuffer: WebGLBuffer | null = null;\n let currentProgram: GLProgram | null = null;\n const fxcanvas = canvas(100, 100);\n const shaderProgramCache = { }; // key is the shader program source, value is the compiled program\n const DRAW = { INTERMEDIATE: 1 };\n const gl = fxcanvas.getContext('webgl') as WebGLRenderingContext;\n if (!gl) throw new Error('filter: cannot get webgl context');\n\n function resize(width, height) {\n if (width === fxcanvas.width && height === fxcanvas.height) return; // Same width/height? Nothing to do here\n fxcanvas.width = width;\n fxcanvas.height = height;\n if (!vertexBuffer) { // Create the context if we don't have it yet\n const vertices = new Float32Array([-1, -1, 0, 1, 1, -1, 1, 1, -1, 1, 0, 0, -1, 1, 0, 0, 1, -1, 1, 1, 1, 1, 1, 0]); // Create the vertex buffer for the two triangles [x, y, u, v] * 6\n vertexBuffer = gl.createBuffer();\n gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);\n gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);\n gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, true);\n }\n gl.viewport(0, 0, fxcanvas.width, fxcanvas.height);\n tempFramebuffers = [null, null]; // Delete old temp framebuffers\n }\n\n function createFramebufferTexture(width, height) {\n const fbo = gl.createFramebuffer() as WebGLFramebuffer;\n gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);\n const renderbuffer = gl.createRenderbuffer();\n gl.bindRenderbuffer(gl.RENDERBUFFER, renderbuffer);\n const texture = gl.createTexture() as WebGLTexture;\n gl.bindTexture(gl.TEXTURE_2D, texture);\n gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);\n gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);\n gl.bindTexture(gl.TEXTURE_2D, null);\n gl.bindFramebuffer(gl.FRAMEBUFFER, null);\n return { fbo, texture };\n }\n\n function getTempFramebuffer(index) {\n tempFramebuffers[index] = tempFramebuffers[index] || createFramebufferTexture(fxcanvas.width, fxcanvas.height);\n return tempFramebuffers[index] as { fbo: WebGLFramebuffer, texture: WebGLTexture };\n }\n\n function draw(flags = 0) {\n if (!currentProgram) return;\n let source: WebGLTexture | null = 
null;\n let target: WebGLFramebuffer | null = null;\n let flipY = false;\n if (drawCount === 0) source = sourceTexture; // First draw call - use the source texture\n else source = getTempFramebuffer(currentFramebufferIndex).texture || null; // All following draw calls use the temp buffer last drawn to\n drawCount++;\n if (lastInChain && !(flags & DRAW.INTERMEDIATE)) { // Last filter in our chain - draw directly to the WebGL Canvas. We may also have to flip the image vertically now\n target = null;\n flipY = drawCount % 2 === 0;\n } else {\n currentFramebufferIndex = (currentFramebufferIndex + 1) % 2;\n target = getTempFramebuffer(currentFramebufferIndex).fbo || null; // Intermediate draw call - get a temp buffer to draw to\n }\n gl.bindTexture(gl.TEXTURE_2D, source); // Bind the source and target and draw the two triangles\n gl.bindFramebuffer(gl.FRAMEBUFFER, target);\n gl.uniform1f(currentProgram.uniform['flipY'], (flipY ? -1 : 1));\n gl.drawArrays(gl.TRIANGLES, 0, 6);\n }\n\n function compileShader(fragmentSource) {\n if (shaderProgramCache[fragmentSource]) {\n currentProgram = shaderProgramCache[fragmentSource];\n gl.useProgram((currentProgram ? currentProgram.id : null) || null);\n return currentProgram as GLProgram;\n }\n currentProgram = new GLProgram(gl, shaders.vertexIdentity, fragmentSource);\n const floatSize = Float32Array.BYTES_PER_ELEMENT;\n const vertSize = 4 * floatSize;\n gl.enableVertexAttribArray(currentProgram.attribute['pos']);\n gl.vertexAttribPointer(currentProgram.attribute['pos'], 2, gl.FLOAT, false, vertSize, 0 * floatSize);\n gl.enableVertexAttribArray(currentProgram.attribute['uv']);\n gl.vertexAttribPointer(currentProgram.attribute['uv'], 2, gl.FLOAT, false, vertSize, 2 * floatSize);\n shaderProgramCache[fragmentSource] = currentProgram;\n return currentProgram as GLProgram;\n }\n\n const filter = {\n colorMatrix: (matrix) => { // general color matrix filter\n const m = new Float32Array(matrix);\n m[4] /= 255;\n m[9] /= 255;\n m[14] /= 255;\n m[19] /= 255;\n const shader = (m[18] === 1 && m[3] === 0 && m[8] === 0 && m[13] === 0 && m[15] === 0 && m[16] === 0 && m[17] === 0 && m[19] === 0) // Can we ignore the alpha value? Makes things a bit faster.\n ? 
shaders.colorMatrixWithoutAlpha\n : shaders.colorMatrixWithAlpha;\n const program = compileShader(shader);\n gl.uniform1fv(program.uniform['m'], m);\n draw();\n },\n\n brightness: (brightness) => {\n const b = (brightness || 0) + 1;\n filter.colorMatrix([\n b, 0, 0, 0, 0,\n 0, b, 0, 0, 0,\n 0, 0, b, 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n },\n\n saturation: (amount) => {\n const x = (amount || 0) * 2 / 3 + 1;\n const y = ((x - 1) * -0.5);\n filter.colorMatrix([\n x, y, y, 0, 0,\n y, x, y, 0, 0,\n y, y, x, 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n },\n\n desaturate: () => {\n filter.saturation(-1);\n },\n\n contrast: (amount) => {\n const v = (amount || 0) + 1;\n const o = -128 * (v - 1);\n filter.colorMatrix([\n v, 0, 0, 0, o,\n 0, v, 0, 0, o,\n 0, 0, v, 0, o,\n 0, 0, 0, 1, 0,\n ]);\n },\n\n negative: () => {\n filter.contrast(-2);\n },\n\n hue: (rotation) => {\n rotation = (rotation || 0) / 180 * Math.PI;\n const cos = Math.cos(rotation);\n const sin = Math.sin(rotation);\n const lumR = 0.213;\n const lumG = 0.715;\n const lumB = 0.072;\n filter.colorMatrix([\n lumR + cos * (1 - lumR) + sin * (-lumR), lumG + cos * (-lumG) + sin * (-lumG), lumB + cos * (-lumB) + sin * (1 - lumB), 0, 0,\n lumR + cos * (-lumR) + sin * (0.143), lumG + cos * (1 - lumG) + sin * (0.140), lumB + cos * (-lumB) + sin * (-0.283), 0, 0,\n lumR + cos * (-lumR) + sin * (-(1 - lumR)), lumG + cos * (-lumG) + sin * (lumG), lumB + cos * (1 - lumB) + sin * (lumB), 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n },\n\n desaturateLuminance: () => {\n filter.colorMatrix([\n 0.2764723, 0.9297080, 0.0938197, 0, -37.1,\n 0.2764723, 0.9297080, 0.0938197, 0, -37.1,\n 0.2764723, 0.9297080, 0.0938197, 0, -37.1,\n 0, 0, 0, 1, 0,\n ]);\n },\n\n sepia: () => {\n filter.colorMatrix([\n 0.393, 0.7689999, 0.18899999, 0, 0,\n 0.349, 0.6859999, 0.16799999, 0, 0,\n 0.272, 0.5339999, 0.13099999, 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n },\n\n brownie: () => {\n filter.colorMatrix([\n 0.5997023498159715, 0.34553243048391263, -0.2708298674538042, 0, 47.43192855600873,\n -0.037703249837783157, 0.8609577587992641, 0.15059552388459913, 0, -36.96841498319127,\n 0.24113635128153335, -0.07441037908422492, 0.44972182064877153, 0, -7.562075277591283,\n 0, 0, 0, 1, 0,\n ]);\n },\n\n vintagePinhole: () => {\n filter.colorMatrix([\n 0.6279345635605994, 0.3202183420819367, -0.03965408211312453, 0, 9.651285835294123,\n 0.02578397704808868, 0.6441188644374771, 0.03259127616149294, 0, 7.462829176470591,\n 0.0466055556782719, -0.0851232987247891, 0.5241648018700465, 0, 5.159190588235296,\n 0, 0, 0, 1, 0,\n ]);\n },\n\n kodachrome: () => {\n filter.colorMatrix([\n 1.1285582396593525, -0.3967382283601348, -0.03992559172921793, 0, 63.72958762196502,\n -0.16404339962244616, 1.0835251566291304, -0.05498805115633132, 0, 24.732407896706203,\n -0.16786010706155763, -0.5603416277695248, 1.6014850761964943, 0, 35.62982807460946,\n 0, 0, 0, 1, 0,\n ]);\n },\n\n technicolor: () => {\n filter.colorMatrix([\n 1.9125277891456083, -0.8545344976951645, -0.09155508482755585, 0, 11.793603434377337,\n -0.3087833385928097, 1.7658908555458428, -0.10601743074722245, 0, -70.35205161461398,\n -0.231103377548616, -0.7501899197440212, 1.847597816108189, 0, 30.950940869491138,\n 0, 0, 0, 1, 0,\n ]);\n },\n\n polaroid: () => {\n filter.colorMatrix([\n 1.438, -0.062, -0.062, 0, 0,\n -0.122, 1.378, -0.122, 0, 0,\n -0.016, -0.016, 1.483, 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n },\n\n shiftToBGR: () => {\n filter.colorMatrix([\n 0, 0, 1, 0, 0,\n 0, 1, 0, 0, 0,\n 1, 0, 0, 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n },\n\n convolution: (matrix) => { // general 
convolution Filter\n const m = new Float32Array(matrix);\n const pixelSizeX = 1 / fxcanvas.width;\n const pixelSizeY = 1 / fxcanvas.height;\n const program = compileShader(shaders.convolution);\n gl.uniform1fv(program.uniform['m'], m);\n gl.uniform2f(program.uniform['px'], pixelSizeX, pixelSizeY);\n draw();\n },\n\n detectEdges: () => {\n // @ts-ignore this\n filter.convolution.call(this, [\n 0, 1, 0,\n 1, -4, 1,\n 0, 1, 0,\n ]);\n },\n\n sobelX: () => {\n // @ts-ignore this\n filter.convolution.call(this, [\n -1, 0, 1,\n -2, 0, 2,\n -1, 0, 1,\n ]);\n },\n\n sobelY: () => {\n // @ts-ignore this\n filter.convolution.call(this, [\n -1, -2, -1,\n 0, 0, 0,\n 1, 2, 1,\n ]);\n },\n\n sharpen: (amount) => {\n const a = amount || 1;\n // @ts-ignore this\n filter.convolution.call(this, [\n 0, -1 * a, 0,\n -1 * a, 1 + 4 * a, -1 * a,\n 0, -1 * a, 0,\n ]);\n },\n\n emboss: (size) => {\n const s = size || 1;\n // @ts-ignore this\n filter.convolution.call(this, [\n -2 * s, -1 * s, 0,\n -1 * s, 1, 1 * s,\n 0, 1 * s, 2 * s,\n ]);\n },\n\n blur: (size) => {\n const blurSizeX = (size / 7) / fxcanvas.width;\n const blurSizeY = (size / 7) / fxcanvas.height;\n const program = compileShader(shaders.blur);\n // Vertical\n gl.uniform2f(program.uniform['px'], 0, blurSizeY);\n draw(DRAW.INTERMEDIATE);\n // Horizontal\n gl.uniform2f(program.uniform['px'], blurSizeX, 0);\n draw();\n },\n\n pixelate: (size) => {\n const blurSizeX = (size) / fxcanvas.width;\n const blurSizeY = (size) / fxcanvas.height;\n const program = compileShader(shaders.pixelate);\n gl.uniform2f(program.uniform['size'], blurSizeX, blurSizeY);\n draw();\n },\n };\n\n // @ts-ignore this\n this.add = function (name) {\n // eslint-disable-next-line prefer-rest-params\n const args = Array.prototype.slice.call(arguments, 1);\n const func = filter[name];\n filterChain.push({ func, args });\n };\n\n // @ts-ignore this\n this.reset = function () {\n filterChain = [];\n };\n\n // @ts-ignore this\n this.get = function () {\n return filterChain;\n };\n\n // @ts-ignore this\n this.apply = function (image) {\n resize(image.width, image.height);\n drawCount = 0;\n if (!sourceTexture) sourceTexture = gl.createTexture(); // Create the texture for the input image if we haven't yet\n gl.bindTexture(gl.TEXTURE_2D, sourceTexture);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);\n gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);\n for (let i = 0; i < filterChain.length; i++) {\n lastInChain = (i === filterChain.length - 1);\n const f = filterChain[i];\n // @ts-ignore function assigment\n f.func.apply(this, f.args || []);\n }\n return fxcanvas;\n };\n\n // @ts-ignore this\n this.draw = function (image) {\n this.add('brightness', 0);\n return this.apply(image);\n };\n}\n", "/**\n * Image enhancements\n */\n\nimport * as tf from '../../dist/tfjs.esm.js';\nimport type { Tensor } from '../exports';\n\nexport function histogramEqualization(input: Tensor): Tensor {\n const channels = tf.split(input, 3, 2);\n const min: Tensor[] = [tf.min(channels[0]), tf.min(channels[1]), tf.min(channels[2])];\n const max: Tensor[] = [tf.max(channels[0]), tf.max(channels[1]), tf.max(channels[2])];\n const sub = [tf.sub(channels[0], min[0]), tf.sub(channels[1], min[1]), tf.sub(channels[2], min[2])];\n const range = [tf.sub(max[0], 
min[0]), tf.sub(max[1], min[1]), tf.sub(max[2], min[2])];\n const fact = [tf.div(255, range[0]), tf.div(255, range[1]), tf.div(255, range[2])];\n const enh = [tf.mul(sub[0], fact[0]), tf.mul(sub[1], fact[1]), tf.mul(sub[2], fact[2])];\n const rgb = tf.stack([enh[0], enh[1], enh[2]], 2);\n const reshape = tf.reshape(rgb, [1, input.shape[0], input.shape[1], 3]);\n tf.dispose([...channels, ...min, ...max, ...sub, ...range, ...fact, ...enh, rgb]);\n return reshape;\n}\n", "/**\n * Image Processing algorithm implementation\n */\n\nimport * as tf from '../../dist/tfjs.esm.js';\nimport * as fxImage from './imagefx';\nimport type { Input, AnyCanvas, Tensor, Config } from '../exports';\nimport { env } from '../util/env';\nimport { log, now } from '../util/util';\nimport * as enhance from './enhance';\n\nconst maxSize = 2048;\n// internal temp canvases\nlet inCanvas: AnyCanvas | null = null; // use global variable to avoid recreating canvas on each frame\nlet outCanvas: AnyCanvas | null = null; // use global variable to avoid recreating canvas on each frame\nlet tmpCanvas: AnyCanvas | null = null; // use global variable to avoid recreating canvas on each frame\n// @ts-ignore // imagefx is js module that should be converted to a class\nlet fx: fxImage.GLImageFilter | null; // instance of imagefx\n\nexport function canvas(width, height): AnyCanvas {\n let c;\n if (env.browser) { // browser defines canvas object\n if (env.worker) { // if runing in web worker use OffscreenCanvas\n c = new OffscreenCanvas(width, height);\n } else { // otherwise use DOM canvas\n if (typeof document === 'undefined') throw new Error('attempted to run in web worker but offscreenCanvas is not supported');\n c = document.createElement('canvas');\n c.width = width;\n c.height = height;\n }\n } else { // if not running in browser, there is no \"default\" canvas object, so we need monkey patch or fail\n // @ts-ignore // env.canvas is an external monkey-patch\n if (typeof env.Canvas !== 'undefined') c = new env.Canvas(width, height);\n else if (typeof globalThis.Canvas !== 'undefined') c = new globalThis.Canvas(width, height);\n }\n // if (!c) throw new Error('cannot create canvas');\n return c;\n}\n\n// helper function to copy canvas from input to output\nexport function copy(input: AnyCanvas, output?: AnyCanvas) {\n const outputCanvas = output || canvas(input.width, input.height);\n const ctx = outputCanvas.getContext('2d') as CanvasRenderingContext2D;\n ctx.drawImage(input, 0, 0);\n return outputCanvas;\n}\n\n// process input image and return tensor\n// input can be tensor, imagedata, htmlimageelement, htmlvideoelement\n// input is resized and run through imagefx filter\nexport function process(input: Input, config: Config, getTensor: boolean = true): { tensor: Tensor | null, canvas: AnyCanvas | null } {\n if (!input) {\n // throw new Error('input is missing');\n if (config.debug) log('input is missing');\n return { tensor: null, canvas: null }; // video may become temporarily unavailable due to onresize\n }\n // sanity checks since different browsers do not implement all dom elements\n if (\n !(input instanceof tf.Tensor)\n && !(typeof Image !== 'undefined' && input instanceof Image)\n && !(typeof env.Canvas !== 'undefined' && input instanceof env.Canvas)\n && !(typeof globalThis.Canvas !== 'undefined' && input instanceof globalThis.Canvas)\n && !(typeof ImageData !== 'undefined' && input instanceof ImageData)\n && !(typeof ImageBitmap !== 'undefined' && input instanceof ImageBitmap)\n && !(typeof HTMLImageElement !== 
'undefined' && input instanceof HTMLImageElement)\n && !(typeof HTMLMediaElement !== 'undefined' && input instanceof HTMLMediaElement)\n && !(typeof HTMLVideoElement !== 'undefined' && input instanceof HTMLVideoElement)\n && !(typeof HTMLCanvasElement !== 'undefined' && input instanceof HTMLCanvasElement)\n && !(typeof OffscreenCanvas !== 'undefined' && input instanceof OffscreenCanvas)\n ) {\n throw new Error('input type is not recognized');\n }\n if (input instanceof tf.Tensor) {\n // if input is tensor, use as-is\n if ((input)['isDisposedInternal']) {\n throw new Error('input tensor is disposed');\n } else if (!(input as Tensor).shape || (input as Tensor).shape.length !== 4 || (input as Tensor).shape[0] !== 1 || (input as Tensor).shape[3] !== 3) {\n throw new Error(`input tensor shape must be [1, height, width, 3] and instead was ${input['shape']}`);\n } else {\n return { tensor: tf.clone(input), canvas: (config.filter.return ? outCanvas : null) };\n }\n } else {\n // check if resizing will be needed\n if (typeof input['readyState'] !== 'undefined' && input['readyState'] <= 2) {\n if (config.debug) log('input stream is not ready');\n return { tensor: null, canvas: inCanvas }; // video may become temporarily unavailable due to onresize\n }\n const originalWidth = input['naturalWidth'] || input['videoWidth'] || input['width'] || (input['shape'] && (input['shape'][1] > 0));\n const originalHeight = input['naturalHeight'] || input['videoHeight'] || input['height'] || (input['shape'] && (input['shape'][2] > 0));\n if (!originalWidth || !originalHeight) {\n if (config.debug) log('cannot determine input dimensions');\n return { tensor: null, canvas: inCanvas }; // video may become temporarily unavailable due to onresize\n }\n let targetWidth = originalWidth;\n let targetHeight = originalHeight;\n if (targetWidth > maxSize) {\n targetWidth = maxSize;\n targetHeight = Math.trunc(targetWidth * originalHeight / originalWidth);\n }\n if (targetHeight > maxSize) {\n targetHeight = maxSize;\n targetWidth = Math.trunc(targetHeight * originalWidth / originalHeight);\n }\n\n // create our canvas and resize it if needed\n if ((config.filter.width || 0) > 0) targetWidth = config.filter.width;\n else if ((config.filter.height || 0) > 0) targetWidth = originalWidth * ((config.filter.height || 0) / originalHeight);\n if ((config.filter.height || 0) > 0) targetHeight = config.filter.height;\n else if ((config.filter.width || 0) > 0) targetHeight = originalHeight * ((config.filter.width || 0) / originalWidth);\n if (!targetWidth || !targetHeight) throw new Error('input cannot determine dimension');\n if (!inCanvas || (inCanvas.width !== targetWidth) || (inCanvas.height !== targetHeight)) inCanvas = canvas(targetWidth, targetHeight);\n\n // draw input to our canvas\n const inCtx = inCanvas.getContext('2d') as CanvasRenderingContext2D;\n if ((typeof ImageData !== 'undefined') && (input instanceof ImageData)) {\n inCtx.putImageData(input, 0, 0);\n } else {\n if (config.filter.flip && typeof inCtx.translate !== 'undefined') {\n inCtx.translate(originalWidth, 0);\n inCtx.scale(-1, 1);\n inCtx.drawImage(input as AnyCanvas, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas.width, inCanvas.height);\n inCtx.setTransform(1, 0, 0, 1, 0, 0); // resets transforms to defaults\n } else {\n inCtx.drawImage(input as AnyCanvas, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas.width, inCanvas.height);\n }\n }\n\n if (!outCanvas || (inCanvas.width !== outCanvas.width) || (inCanvas.height !== outCanvas.height)) outCanvas = 
canvas(inCanvas.width, inCanvas.height); // init output canvas\n\n // imagefx transforms using gl from input canvas to output canvas\n if (config.filter.enabled && env.webgl.supported) {\n if (!fx) fx = env.browser ? new fxImage.GLImageFilter() : null; // && (typeof document !== 'undefined')\n env.filter = !!fx;\n if (!fx) return { tensor: null, canvas: inCanvas };\n fx.reset();\n if (config.filter.brightness !== 0) fx.add('brightness', config.filter.brightness);\n if (config.filter.contrast !== 0) fx.add('contrast', config.filter.contrast);\n if (config.filter.sharpness !== 0) fx.add('sharpen', config.filter.sharpness);\n if (config.filter.blur !== 0) fx.add('blur', config.filter.blur);\n if (config.filter.saturation !== 0) fx.add('saturation', config.filter.saturation);\n if (config.filter.hue !== 0) fx.add('hue', config.filter.hue);\n if (config.filter.negative) fx.add('negative');\n if (config.filter.sepia) fx.add('sepia');\n if (config.filter.vintage) fx.add('brownie');\n if (config.filter.sepia) fx.add('sepia');\n if (config.filter.kodachrome) fx.add('kodachrome');\n if (config.filter.technicolor) fx.add('technicolor');\n if (config.filter.polaroid) fx.add('polaroid');\n if (config.filter.pixelate !== 0) fx.add('pixelate', config.filter.pixelate);\n if (fx.get() > 0) outCanvas = fx.apply(inCanvas);\n else outCanvas = fx.draw(inCanvas);\n } else {\n copy(inCanvas, outCanvas); // if no filters applied, output canvas is input canvas\n if (fx) fx = null;\n env.filter = !!fx;\n }\n\n if (!getTensor) return { tensor: null, canvas: outCanvas }; // just canvas was requested\n if (!outCanvas) throw new Error('cannot create output canvas');\n\n // create tensor from image unless input was a tensor already\n let pixels;\n let depth = 3;\n if ((typeof ImageData !== 'undefined' && input instanceof ImageData) || (input['data'] && input['width'] && input['height'])) { // if input is imagedata, just use it\n if (env.browser && tf.browser) {\n pixels = tf.browser ? 
tf.browser.fromPixels(input) : null;\n } else {\n depth = input['data'].length / input['height'] / input['width'];\n // const arr = Uint8Array.from(input['data']);\n const arr = new Uint8Array(input['data']['buffer']);\n pixels = tf.tensor(arr, [input['height'], input['width'], depth], 'int32');\n }\n } else {\n if (!tmpCanvas || (outCanvas.width !== tmpCanvas.width) || (outCanvas.height !== tmpCanvas.height)) tmpCanvas = canvas(outCanvas.width, outCanvas.height); // init output canvas\n if (tf.browser && env.browser) {\n if (config.backend === 'webgl' || config.backend === 'humangl' || config.backend === 'webgpu') {\n pixels = tf.browser.fromPixels(outCanvas); // safe to reuse since both backend and context are gl based\n } else {\n tmpCanvas = copy(outCanvas); // cannot use output canvas as it already has gl context so we do a silly one more canvas\n pixels = tf.browser.fromPixels(tmpCanvas);\n }\n } else {\n const tempCanvas = copy(outCanvas); // cannot use output canvas as it already has gl context so we do a silly one more canvas\n const tempCtx = tempCanvas.getContext('2d') as CanvasRenderingContext2D;\n const tempData = tempCtx.getImageData(0, 0, targetWidth, targetHeight);\n depth = tempData.data.length / targetWidth / targetHeight;\n const arr = new Uint8Array(tempData.data.buffer);\n pixels = tf.tensor(arr, [targetWidth, targetHeight, depth]);\n }\n }\n if (depth === 4) { // rgba to rgb\n const rgb = tf.slice3d(pixels, [0, 0, 0], [-1, -1, 3]); // strip alpha channel\n tf.dispose(pixels);\n pixels = rgb;\n /*\n const channels = tf.split(pixels, 4, 2); // split rgba to channels\n tf.dispose(pixels);\n const rgb = tf.stack([channels[0], channels[1], channels[2]], 2); // stack channels back to rgb and ignore alpha\n pixels = tf.reshape(rgb, [rgb.shape[0], rgb.shape[1], 3]); // move extra dim from the end of tensor and use it as batch number instead\n tf.dispose([rgb, ...channels]);\n */\n }\n if (!pixels) throw new Error('cannot create tensor from input');\n const casted = tf.cast(pixels, 'float32');\n const tensor = config.filter.equalization ? enhance.histogramEqualization(casted) : tf.expandDims(casted, 0);\n tf.dispose([pixels, casted]);\n return { tensor, canvas: (config.filter.return ? outCanvas : null) };\n }\n}\n\nlet lastInputSum = 0;\nlet lastCacheDiff = 1;\nlet benchmarked = 0;\n\nconst checksum = async (input: Tensor): Promise => { // use tf sum or js based sum loop depending on which is faster\n const resizeFact = 48;\n const reduced: Tensor = tf.image.resizeBilinear(input, [Math.trunc((input.shape[1] || 1) / resizeFact), Math.trunc((input.shape[2] || 1) / resizeFact)]);\n const tfSum = async (): Promise => {\n const sumT = tf.sum(reduced);\n const sum0 = await sumT.data();\n tf.dispose(sumT);\n return sum0[0];\n };\n const jsSum = async (): Promise => {\n const reducedData = await reduced.data(); // raw image rgb array\n let sum0 = 0;\n for (let i = 0; i < reducedData.length / 3; i++) sum0 += reducedData[3 * i + 2]; // look only at green value of each pixel\n return sum0;\n };\n if (benchmarked === 0) {\n const t0 = now();\n await jsSum();\n const t1 = now();\n await tfSum();\n const t2 = now();\n benchmarked = t1 - t0 < t2 - t1 ? 1 : 2;\n }\n const res = benchmarked === 1 ? 
await jsSum() : await tfSum();\n tf.dispose(reduced);\n return res;\n};\n\nexport async function skip(config, input: Tensor) {\n if (config.cacheSensitivity === 0) return false;\n const sum = await checksum(input);\n const diff = 100 * (Math.max(sum, lastInputSum) / Math.min(sum, lastInputSum) - 1);\n lastInputSum = sum;\n // if previous frame was skipped, skip this frame if changed more than cacheSensitivity\n // if previous frame was not skipped, then look for cacheSensitivity or difference larger than one in previous frame to avoid resetting cache in subsequent frames unnecessarily\n let skipFrame = diff < Math.max(config.cacheSensitivity, lastCacheDiff);\n // if difference is above 10x threshold, don't use last value to force reset cache for significant change of scenes or images\n lastCacheDiff = diff > 10 * config.cacheSensitivity ? 0 : diff;\n skipFrame = skipFrame && (lastCacheDiff > 0); // if no cached diff value then force no skip\n return skipFrame;\n}\n", "import * as tf from '../../dist/tfjs.esm.js';\nimport * as image from '../image/image';\n\n/** Env class that holds detected capabilities */\nexport class Env {\n /** Running in Browser */\n browser: boolean;\n /** Running in NodeJS */\n node: boolean;\n /** Running in WebWorker thread */\n worker: boolean;\n /** Detected platform */\n platform: string = '';\n /** Detected agent */\n agent: string = '';\n /** List of supported backends */\n backends: string[] = [];\n /** Has any work been performed so far */\n initial: boolean;\n /** Are image filters supported? */\n filter: boolean | undefined;\n /** TFJS instance details */\n tfjs: {\n version: undefined | string,\n };\n /** Is offscreenCanvas supported? */\n offscreen: undefined | boolean;\n /** Are performance counter instant values or additive */\n perfadd: boolean = false;\n /** WASM detected capabilities */\n wasm: {\n supported: undefined | boolean,\n backend: undefined | boolean,\n simd: undefined | boolean,\n multithread: undefined | boolean,\n } = {\n supported: undefined,\n backend: undefined,\n simd: undefined,\n multithread: undefined,\n };\n /** WebGL detected capabilities */\n webgl: {\n supported: undefined | boolean,\n backend: undefined | boolean,\n version: undefined | string,\n renderer: undefined | string,\n } = {\n supported: undefined,\n backend: undefined,\n version: undefined,\n renderer: undefined,\n };\n /** WebGPU detected capabilities */\n webgpu: {\n supported: undefined | boolean,\n backend: undefined | boolean,\n adapter: undefined | string,\n } = {\n supported: undefined,\n backend: undefined,\n adapter: undefined,\n };\n /** CPU info */\n cpu: {\n model: undefined | string,\n flags: string[],\n } = {\n model: undefined,\n flags: [],\n };\n /** List of supported kernels for current backend */\n kernels: string[] = [];\n /** MonkeyPatch for Canvas */\n Canvas: undefined;\n /** MonkeyPatch for Image */\n Image: undefined;\n /** MonkeyPatch for ImageData */\n ImageData: undefined;\n\n constructor() {\n this.browser = typeof navigator !== 'undefined';\n this.node = typeof process !== 'undefined';\n this.tfjs = { version: tf.version_core };\n this.offscreen = typeof OffscreenCanvas !== 'undefined';\n this.initial = true;\n // @ts-ignore WorkerGlobalScope evaluated in browser only\n this.worker = this.browser && this.offscreen ? 
(typeof WorkerGlobalScope !== 'undefined') : undefined;\n if (typeof navigator !== 'undefined') {\n const raw = navigator.userAgent.match(/\\(([^()]+)\\)/g);\n if (raw && raw[0]) {\n const platformMatch = raw[0].match(/\\(([^()]+)\\)/g);\n this.platform = (platformMatch && platformMatch[0]) ? platformMatch[0].replace(/\\(|\\)/g, '') : '';\n this.agent = navigator.userAgent.replace(raw[0], '');\n if (this.platform[1]) this.agent = this.agent.replace(raw[1], '');\n this.agent = this.agent.replace(/ /g, ' ');\n // chrome offscreencanvas gpu memory leak\n /*\n const isChrome = env.agent.match(/Chrome\\/.[0-9]/g);\n const verChrome = isChrome && isChrome[0] ? isChrome[0].split('/')[1] : 0;\n if (verChrome > 92 && verChrome < 96) {\n log('disabling offscreenCanvas due to browser error:', isChrome ? isChrome[0] : 'unknown');\n this.offscreen = false;\n }\n */\n }\n } else if (typeof process !== 'undefined') {\n this.platform = `${process.platform} ${process.arch}`;\n this.agent = `NodeJS ${process.version}`;\n }\n }\n\n async updateBackend() {\n // analyze backends\n this.backends = Object.keys(tf.engine().registryFactory);\n this.wasm.supported = typeof WebAssembly !== 'undefined';\n this.wasm.backend = this.backends.includes('wasm');\n if (this.wasm.supported && this.wasm.backend && tf.getBackend() === 'wasm') {\n this.wasm.simd = await tf.env().getAsync('WASM_HAS_SIMD_SUPPORT');\n this.wasm.multithread = await tf.env().getAsync('WASM_HAS_MULTITHREAD_SUPPORT');\n }\n const c = image.canvas(100, 100);\n const ctx = c ? c.getContext('webgl2') : undefined; // causes too many gl contexts\n // const ctx = typeof tf.backend().getGPGPUContext !== undefined ? tf.backend().getGPGPUContext : null;\n this.webgl.supported = typeof ctx !== 'undefined';\n this.webgl.backend = this.backends.includes('webgl');\n if (this.webgl.supported && this.webgl.backend && (tf.getBackend() === 'webgl' || tf.getBackend() === 'humangl')) {\n // @ts-ignore getGPGPUContext only exists on WebGL backend\n const gl = tf.backend().gpgpu !== 'undefined' ? 
await tf.backend().getGPGPUContext().gl : null;\n if (gl) {\n this.webgl.version = gl.getParameter(gl.VERSION);\n this.webgl.renderer = gl.getParameter(gl.RENDERER);\n }\n }\n this.webgpu.supported = this.browser && typeof navigator['gpu'] !== 'undefined';\n this.webgpu.backend = this.backends.includes('webgpu');\n if (this.webgpu.supported) this.webgpu.adapter = (await navigator['gpu'].requestAdapter()).name;\n // enumerate kernels\n this.kernels = tf.getKernelsForBackend(tf.getBackend()).map((kernel) => kernel.kernelName.toLowerCase());\n }\n\n async updateCPU() {\n const cpu = { model: '', flags: [] };\n if (this.node && this.platform.startsWith('linux')) {\n // eslint-disable-next-line global-require\n const fs = require('fs');\n try {\n const data = fs.readFileSync('/proc/cpuinfo').toString();\n for (const line of data.split('\\n')) {\n if (line.startsWith('model name')) {\n cpu.model = line.match(/:(.*)/g)[0].replace(':', '').trim();\n }\n if (line.startsWith('flags')) {\n cpu.flags = line.match(/:(.*)/g)[0].replace(':', '').trim().split(' ').sort();\n }\n }\n } catch { /**/ }\n }\n if (!this['cpu']) Object.defineProperty(this, 'cpu', { value: cpu });\n else this['cpu'] = cpu;\n }\n}\n\nexport const env = new Env();\n", "/**\n * GEAR [gender/emotion/age/race] model implementation\n *\n * Based on: [**GEAR Predictor**](https://github.com/Udolf15/GEAR-Predictor)\n *\n * Obsolete and replaced by `faceres` that performs age/gender/descriptor analysis\n * Config placeholder: agegenderrace: { enabled: true, modelPath: 'gear.json' },\n */\n\nimport { log, join, now } from '../util/util';\nimport * as tf from '../../dist/tfjs.esm.js';\nimport type { Config } from '../config';\nimport type { GraphModel, Tensor } from '../tfjs/types';\nimport { env } from '../util/env';\n\nlet model: GraphModel | null;\n\nlet last = { age: 0 };\nlet lastTime = 0;\nlet skipped = Number.MAX_SAFE_INTEGER;\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nexport async function load(config: Config | any) {\n if (env.initial) model = null;\n if (!model) {\n model = await tf.loadGraphModel(join(config.modelBasePath, config.face.agegenderrace.modelPath)) as unknown as GraphModel;\n if (!model || !model['modelUrl']) log('load model failed:', config.face.agegenderrace.modelPath);\n else if (config.debug) log('load model:', model['modelUrl']);\n } else if (config.debug) log('cached model:', model['modelUrl']);\n return model;\n}\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nexport async function predict(image: Tensor, config: Config) {\n if (!model) return null;\n // @ts-ignore config disabled\n const skipFrame = skipped < (config.face.agegenderrace?.skipFrames || 0);\n // @ts-ignore config disabled\n const skipTime = (config.face.agegenderrace?.skipTime || 0) > (now() - lastTime);\n // @ts-ignore config disabled\n if (config.skipAllowed && skipTime && skipFrame && last.age && (last.age > 0)) {\n skipped++;\n return last;\n }\n skipped = 0;\n return new Promise(async (resolve) => {\n if (!model?.inputs[0].shape) return;\n const resize = tf.image.resizeBilinear(image, [model?.inputs[0].shape[2], model?.inputs[0].shape[1]], false);\n // const enhance = tf.mul(resize, [255.0]);\n\n let ageT;\n let genderT;\n let raceT;\n const obj = { age: 0 };\n\n // @ts-ignore array definition unavailable at compile time\n if (config.face.agegenderrace.enabled) [ageT, genderT, raceT] = model.execute(resize, ['age_output', 'gender_output', 'race_output']);\n lastTime = now();\n tf.dispose(resize);\n // 
tf.dispose(enhance);\n\n if (ageT) {\n // const data = await ageT.data();\n // {0: 'below_20', 1: '21-25', 2: '26-30', 3: '31-40',4: '41-50', 5: '51-60', 6: 'Above60'}\n }\n if (genderT) {\n // const data = await genderT.data();\n }\n if (raceT) {\n // const data = await raceT.data();\n // {0: 'white', 1: 'black', 2: 'asian', 3: 'indian', 4: 'others'}\n }\n\n tf.dispose(ageT);\n tf.dispose(genderT);\n tf.dispose(raceT);\n\n last = obj;\n resolve(obj);\n });\n}\n", "/**\n * Anti-spoofing model implementation\n */\n\nimport { log, join, now } from '../util/util';\nimport type { Config } from '../config';\nimport type { GraphModel, Tensor } from '../tfjs/types';\nimport * as tf from '../../dist/tfjs.esm.js';\nimport { env } from '../util/env';\n\nlet model: GraphModel | null;\nconst cached: Array = [];\nlet skipped = Number.MAX_SAFE_INTEGER;\nlet lastCount = 0;\nlet lastTime = 0;\n\nexport async function load(config: Config): Promise {\n if (env.initial) model = null;\n if (!model) {\n model = await tf.loadGraphModel(join(config.modelBasePath, config.face.antispoof?.modelPath || '')) as unknown as GraphModel;\n if (!model || !model['modelUrl']) log('load model failed:', config.face.antispoof?.modelPath);\n else if (config.debug) log('load model:', model['modelUrl']);\n } else if (config.debug) log('cached model:', model['modelUrl']);\n return model;\n}\n\nexport async function predict(image: Tensor, config: Config, idx, count) {\n if (!model) return null;\n const skipTime = (config.face.antispoof?.skipTime || 0) > (now() - lastTime);\n const skipFrame = skipped < (config.face.antispoof?.skipFrames || 0);\n if (config.skipAllowed && skipTime && skipFrame && (lastCount === count) && cached[idx]) {\n skipped++;\n return cached[idx];\n }\n skipped = 0;\n return new Promise(async (resolve) => {\n const resize = tf.image.resizeBilinear(image, [model?.inputs[0].shape ? model.inputs[0].shape[2] : 0, model?.inputs[0].shape ? 
model.inputs[0].shape[1] : 0], false);\n const res = model?.execute(resize) as Tensor;\n const num = (await res.data())[0];\n cached[idx] = Math.round(100 * num) / 100;\n lastCount = count;\n lastTime = now();\n tf.dispose([resize, res]);\n resolve(cached[idx]);\n });\n}\n", "/**\n * BlazeFace, FaceMesh & Iris model implementation\n * See `facemesh.ts` for entry point\n */\n\nexport const meshAnnotations: Record = {\n silhouette: [\n 10, 338, 297, 332, 284, 251, 389, 356, 454, 323, 361, 288,\n 397, 365, 379, 378, 400, 377, 152, 148, 176, 149, 150, 136,\n 172, 58, 132, 93, 234, 127, 162, 21, 54, 103, 67, 109,\n ],\n lipsUpperOuter: [61, 185, 40, 39, 37, 0, 267, 269, 270, 409, 291],\n lipsLowerOuter: [146, 91, 181, 84, 17, 314, 405, 321, 375, 291],\n lipsUpperInner: [78, 191, 80, 81, 82, 13, 312, 311, 310, 415, 308],\n lipsLowerInner: [78, 95, 88, 178, 87, 14, 317, 402, 318, 324, 308],\n rightEyeUpper0: [246, 161, 160, 159, 158, 157, 173],\n rightEyeLower0: [33, 7, 163, 144, 145, 153, 154, 155, 133],\n rightEyeUpper1: [247, 30, 29, 27, 28, 56, 190],\n rightEyeLower1: [130, 25, 110, 24, 23, 22, 26, 112, 243],\n rightEyeUpper2: [113, 225, 224, 223, 222, 221, 189],\n rightEyeLower2: [226, 31, 228, 229, 230, 231, 232, 233, 244],\n rightEyeLower3: [143, 111, 117, 118, 119, 120, 121, 128, 245],\n rightEyebrowUpper: [156, 70, 63, 105, 66, 107, 55, 193],\n rightEyebrowLower: [35, 124, 46, 53, 52, 65],\n rightEyeIris: [473, 474, 475, 476, 477],\n leftEyeUpper0: [466, 388, 387, 386, 385, 384, 398],\n leftEyeLower0: [263, 249, 390, 373, 374, 380, 381, 382, 362],\n leftEyeUpper1: [467, 260, 259, 257, 258, 286, 414],\n leftEyeLower1: [359, 255, 339, 254, 253, 252, 256, 341, 463],\n leftEyeUpper2: [342, 445, 444, 443, 442, 441, 413],\n leftEyeLower2: [446, 261, 448, 449, 450, 451, 452, 453, 464],\n leftEyeLower3: [372, 340, 346, 347, 348, 349, 350, 357, 465],\n leftEyebrowUpper: [383, 300, 293, 334, 296, 336, 285, 417],\n leftEyebrowLower: [265, 353, 276, 283, 282, 295],\n leftEyeIris: [468, 469, 470, 471, 472],\n midwayBetweenEyes: [168],\n noseTip: [1],\n noseBottom: [2],\n noseRightCorner: [98],\n noseLeftCorner: [327],\n rightCheek: [205],\n leftCheek: [425],\n};\n\nexport const meshLandmarks: Record = {\n count: 468,\n mouth: 13,\n symmetryLine: [13, meshAnnotations['midwayBetweenEyes'][0]],\n};\n\nexport const blazeFaceLandmarks: Record = {\n leftEye: 0,\n rightEye: 1,\n nose: 2,\n mouth: 3,\n leftEar: 4,\n rightEar: 5,\n symmetryLine: [3, 2],\n};\n\nexport const MESH_TO_IRIS_INDICES_MAP: Array<{ key: string, indices: number[] }> = [ // A mapping from facemesh model keypoints to iris model keypoints.\n { key: 'EyeUpper0', indices: [9, 10, 11, 12, 13, 14, 15] },\n { key: 'EyeUpper1', indices: [25, 26, 27, 28, 29, 30, 31] },\n { key: 'EyeUpper2', indices: [41, 42, 43, 44, 45, 46, 47] },\n { key: 'EyeLower0', indices: [0, 1, 2, 3, 4, 5, 6, 7, 8] },\n { key: 'EyeLower1', indices: [16, 17, 18, 19, 20, 21, 22, 23, 24] },\n { key: 'EyeLower2', indices: [32, 33, 34, 35, 36, 37, 38, 39, 40] },\n { key: 'EyeLower3', indices: [54, 55, 56, 57, 58, 59, 60, 61, 62] },\n // { key: 'EyebrowUpper', indices: [63, 64, 65, 66, 67, 68, 69, 70] },\n // { key: 'EyebrowLower', indices: [48, 49, 50, 51, 52, 53] },\n];\n\nexport const UV468: [number, number][] = [\n [0.499976992607117, 0.652534008026123],\n [0.500025987625122, 0.547487020492554],\n [0.499974012374878, 0.602371990680695],\n [0.482113003730774, 0.471979022026062],\n [0.500150978565216, 0.527155995368958],\n [0.499909996986389, 0.498252987861633],\n 
[0.499523013830185, 0.40106201171875],\n [0.289712011814117, 0.380764007568359],\n [0.499954998493195, 0.312398016452789],\n [0.499987006187439, 0.269918978214264],\n [0.500023007392883, 0.107050001621246],\n [0.500023007392883, 0.666234016418457],\n [0.5000159740448, 0.679224014282227],\n [0.500023007392883, 0.692348003387451],\n [0.499976992607117, 0.695277988910675],\n [0.499976992607117, 0.70593398809433],\n [0.499976992607117, 0.719385027885437],\n [0.499976992607117, 0.737019002437592],\n [0.499967992305756, 0.781370997428894],\n [0.499816000461578, 0.562981009483337],\n [0.473773002624512, 0.573909997940063],\n [0.104906998574734, 0.254140973091125],\n [0.365929991006851, 0.409575998783112],\n [0.338757991790771, 0.41302502155304],\n [0.311120003461838, 0.409460008144379],\n [0.274657994508743, 0.389131009578705],\n [0.393361985683441, 0.403706014156342],\n [0.345234006643295, 0.344011008739471],\n [0.370094001293182, 0.346076011657715],\n [0.319321990013123, 0.347265005111694],\n [0.297903001308441, 0.353591024875641],\n [0.24779200553894, 0.410809993743896],\n [0.396889001131058, 0.842755019664764],\n [0.280097991228104, 0.375599980354309],\n [0.106310002505779, 0.399955987930298],\n [0.2099249958992, 0.391353011131287],\n [0.355807989835739, 0.534406006336212],\n [0.471751004457474, 0.65040397644043],\n [0.474155008792877, 0.680191993713379],\n [0.439785003662109, 0.657229006290436],\n [0.414617002010345, 0.66654098033905],\n [0.450374007225037, 0.680860996246338],\n [0.428770989179611, 0.682690978050232],\n [0.374971002340317, 0.727805018424988],\n [0.486716985702515, 0.547628998756409],\n [0.485300987958908, 0.527395009994507],\n [0.257764995098114, 0.314490020275116],\n [0.401223003864288, 0.455172002315521],\n [0.429818987846375, 0.548614978790283],\n [0.421351999044418, 0.533740997314453],\n [0.276895999908447, 0.532056987285614],\n [0.483370006084442, 0.499586999416351],\n [0.33721199631691, 0.282882988452911],\n [0.296391993761063, 0.293242990970612],\n [0.169294998049736, 0.193813979625702],\n [0.447580009698868, 0.302609980106354],\n [0.392390012741089, 0.353887975215912],\n [0.354490011930466, 0.696784019470215],\n [0.067304998636246, 0.730105042457581],\n [0.442739009857178, 0.572826027870178],\n [0.457098007202148, 0.584792017936707],\n [0.381974011659622, 0.694710969924927],\n [0.392388999462128, 0.694203019142151],\n [0.277076005935669, 0.271932005882263],\n [0.422551989555359, 0.563233017921448],\n [0.385919004678726, 0.281364023685455],\n [0.383103013038635, 0.255840003490448],\n [0.331431001424789, 0.119714021682739],\n [0.229923993349075, 0.232002973556519],\n [0.364500999450684, 0.189113974571228],\n [0.229622006416321, 0.299540996551514],\n [0.173287004232407, 0.278747975826263],\n [0.472878992557526, 0.666198015213013],\n [0.446828007698059, 0.668527007102966],\n [0.422762006521225, 0.673889994621277],\n [0.445307999849319, 0.580065965652466],\n [0.388103008270264, 0.693961024284363],\n [0.403039008378983, 0.706539988517761],\n [0.403629004955292, 0.693953037261963],\n [0.460041999816895, 0.557139039039612],\n [0.431158006191254, 0.692366003990173],\n [0.452181994915009, 0.692366003990173],\n [0.475387006998062, 0.692366003990173],\n [0.465828001499176, 0.779190003871918],\n [0.472328990697861, 0.736225962638855],\n [0.473087012767792, 0.717857003211975],\n [0.473122000694275, 0.704625964164734],\n [0.473033010959625, 0.695277988910675],\n [0.427942007780075, 0.695277988910675],\n [0.426479011774063, 0.703539967536926],\n [0.423162013292313, 
0.711845993995667],\n [0.4183090031147, 0.720062971115112],\n [0.390094995498657, 0.639572978019714],\n [0.013953999616206, 0.560034036636353],\n [0.499913990497589, 0.58014702796936],\n [0.413199990987778, 0.69539999961853],\n [0.409626007080078, 0.701822996139526],\n [0.468080013990402, 0.601534962654114],\n [0.422728985548019, 0.585985004901886],\n [0.463079988956451, 0.593783974647522],\n [0.37211999297142, 0.47341400384903],\n [0.334562003612518, 0.496073007583618],\n [0.411671012639999, 0.546965003013611],\n [0.242175996303558, 0.14767599105835],\n [0.290776997804642, 0.201445996761322],\n [0.327338010072708, 0.256527006626129],\n [0.399509996175766, 0.748921036720276],\n [0.441727995872498, 0.261676013469696],\n [0.429764986038208, 0.187834024429321],\n [0.412198007106781, 0.108901023864746],\n [0.288955003023148, 0.398952007293701],\n [0.218936994671822, 0.435410976409912],\n [0.41278201341629, 0.398970007896423],\n [0.257135003805161, 0.355440020561218],\n [0.427684992551804, 0.437960982322693],\n [0.448339998722076, 0.536936044692993],\n [0.178560003638268, 0.45755398273468],\n [0.247308000922203, 0.457193970680237],\n [0.286267012357712, 0.467674970626831],\n [0.332827985286713, 0.460712015628815],\n [0.368755996227264, 0.447206974029541],\n [0.398963987827301, 0.432654976844788],\n [0.476410001516342, 0.405806005001068],\n [0.189241006970406, 0.523923993110657],\n [0.228962004184723, 0.348950982093811],\n [0.490725994110107, 0.562400996685028],\n [0.404670000076294, 0.485132992267609],\n [0.019469000399113, 0.401564002037048],\n [0.426243007183075, 0.420431017875671],\n [0.396993011236191, 0.548797011375427],\n [0.266469985246658, 0.376977026462555],\n [0.439121007919312, 0.51895797252655],\n [0.032313998788595, 0.644356966018677],\n [0.419054001569748, 0.387154996395111],\n [0.462783008813858, 0.505746960639954],\n [0.238978996872902, 0.779744982719421],\n [0.198220998048782, 0.831938028335571],\n [0.107550002634525, 0.540755033493042],\n [0.183610007166862, 0.740257024765015],\n [0.134409993886948, 0.333683013916016],\n [0.385764002799988, 0.883153975009918],\n [0.490967005491257, 0.579378008842468],\n [0.382384985685349, 0.508572995662689],\n [0.174399003386497, 0.397670984268188],\n [0.318785011768341, 0.39623498916626],\n [0.343364000320435, 0.400596976280212],\n [0.396100014448166, 0.710216999053955],\n [0.187885001301765, 0.588537991046906],\n [0.430987000465393, 0.944064974784851],\n [0.318993002176285, 0.898285031318665],\n [0.266247987747192, 0.869701027870178],\n [0.500023007392883, 0.190576016902924],\n [0.499976992607117, 0.954452991485596],\n [0.366169989109039, 0.398822009563446],\n [0.393207013607025, 0.39553701877594],\n [0.410373002290726, 0.391080021858215],\n [0.194993004202843, 0.342101991176605],\n [0.388664990663528, 0.362284004688263],\n [0.365961998701096, 0.355970978736877],\n [0.343364000320435, 0.355356991291046],\n [0.318785011768341, 0.35834002494812],\n [0.301414996385574, 0.363156020641327],\n [0.058132998645306, 0.319076001644135],\n [0.301414996385574, 0.387449026107788],\n [0.499987989664078, 0.618434011936188],\n [0.415838003158569, 0.624195992946625],\n [0.445681989192963, 0.566076993942261],\n [0.465844005346298, 0.620640993118286],\n [0.49992299079895, 0.351523995399475],\n [0.288718998432159, 0.819945991039276],\n [0.335278987884521, 0.852819979190826],\n [0.440512001514435, 0.902418971061707],\n [0.128294005990028, 0.791940987110138],\n [0.408771991729736, 0.373893976211548],\n [0.455606997013092, 0.451801002025604],\n 
[0.499877005815506, 0.908990025520325],\n [0.375436991453171, 0.924192011356354],\n [0.11421000212431, 0.615022003650665],\n [0.448662012815475, 0.695277988910675],\n [0.4480200111866, 0.704632043838501],\n [0.447111994028091, 0.715808033943176],\n [0.444831997156143, 0.730794012546539],\n [0.430011987686157, 0.766808986663818],\n [0.406787008047104, 0.685672998428345],\n [0.400738000869751, 0.681069016456604],\n [0.392399996519089, 0.677703022956848],\n [0.367855995893478, 0.663918972015381],\n [0.247923001646996, 0.601333022117615],\n [0.452769994735718, 0.420849978923798],\n [0.43639200925827, 0.359887003898621],\n [0.416164010763168, 0.368713974952698],\n [0.413385987281799, 0.692366003990173],\n [0.228018000721931, 0.683571994304657],\n [0.468268007040024, 0.352671027183533],\n [0.411361992359161, 0.804327011108398],\n [0.499989002943039, 0.469825029373169],\n [0.479153990745544, 0.442654013633728],\n [0.499974012374878, 0.439637005329132],\n [0.432112008333206, 0.493588984012604],\n [0.499886006116867, 0.866917014122009],\n [0.49991300702095, 0.821729004383087],\n [0.456548988819122, 0.819200992584229],\n [0.344549000263214, 0.745438992977142],\n [0.37890899181366, 0.574010014533997],\n [0.374292999505997, 0.780184984207153],\n [0.319687992334366, 0.570737957954407],\n [0.357154995203018, 0.604269981384277],\n [0.295284003019333, 0.621580958366394],\n [0.447750002145767, 0.862477004528046],\n [0.410986006259918, 0.508723020553589],\n [0.31395098567009, 0.775308012962341],\n [0.354128003120422, 0.812552988529205],\n [0.324548006057739, 0.703992962837219],\n [0.189096003770828, 0.646299958229065],\n [0.279776990413666, 0.71465802192688],\n [0.1338230073452, 0.682700991630554],\n [0.336768001317978, 0.644733011722565],\n [0.429883986711502, 0.466521978378296],\n [0.455527991056442, 0.548622965812683],\n [0.437114000320435, 0.558896005153656],\n [0.467287987470627, 0.529924988746643],\n [0.414712011814117, 0.335219979286194],\n [0.37704598903656, 0.322777986526489],\n [0.344107985496521, 0.320150971412659],\n [0.312875986099243, 0.32233202457428],\n [0.283526003360748, 0.333190023899078],\n [0.241245999932289, 0.382785975933075],\n [0.102986000478268, 0.468762993812561],\n [0.267612010240555, 0.424560010433197],\n [0.297879010438919, 0.433175981044769],\n [0.333433985710144, 0.433878004550934],\n [0.366427004337311, 0.426115989685059],\n [0.396012008190155, 0.416696012020111],\n [0.420121014118195, 0.41022801399231],\n [0.007561000064015, 0.480777025222778],\n [0.432949006557465, 0.569517970085144],\n [0.458638995885849, 0.479089021682739],\n [0.473466008901596, 0.545744001865387],\n [0.476087987422943, 0.563830018043518],\n [0.468472003936768, 0.555056989192963],\n [0.433990985155106, 0.582361996173859],\n [0.483518004417419, 0.562983989715576],\n [0.482482999563217, 0.57784903049469],\n [0.42645001411438, 0.389798998832703],\n [0.438998997211456, 0.39649498462677],\n [0.450067013502121, 0.400434017181396],\n [0.289712011814117, 0.368252992630005],\n [0.276670008897781, 0.363372981548309],\n [0.517862021923065, 0.471948027610779],\n [0.710287988185883, 0.380764007568359],\n [0.526226997375488, 0.573909997940063],\n [0.895093023777008, 0.254140973091125],\n [0.634069979190826, 0.409575998783112],\n [0.661242008209229, 0.41302502155304],\n [0.688880026340485, 0.409460008144379],\n [0.725341975688934, 0.389131009578705],\n [0.606630027294159, 0.40370500087738],\n [0.654766023159027, 0.344011008739471],\n [0.629905998706818, 0.346076011657715],\n [0.680678009986877, 0.347265005111694],\n 
[0.702096998691559, 0.353591024875641],\n [0.75221198797226, 0.410804986953735],\n [0.602918028831482, 0.842862963676453],\n [0.719901978969574, 0.375599980354309],\n [0.893692970275879, 0.399959981441498],\n [0.790081977844238, 0.391354024410248],\n [0.643998026847839, 0.534487962722778],\n [0.528249025344849, 0.65040397644043],\n [0.525849997997284, 0.680191040039062],\n [0.560214996337891, 0.657229006290436],\n [0.585384011268616, 0.66654098033905],\n [0.549625992774963, 0.680860996246338],\n [0.57122802734375, 0.682691991329193],\n [0.624852001667023, 0.72809898853302],\n [0.513050019741058, 0.547281980514526],\n [0.51509702205658, 0.527251958847046],\n [0.742246985435486, 0.314507007598877],\n [0.598631024360657, 0.454979002475739],\n [0.570338010787964, 0.548575043678284],\n [0.578631997108459, 0.533622980117798],\n [0.723087012767792, 0.532054007053375],\n [0.516445994377136, 0.499638974666595],\n [0.662801027297974, 0.282917976379395],\n [0.70362401008606, 0.293271005153656],\n [0.830704987049103, 0.193813979625702],\n [0.552385985851288, 0.302568018436432],\n [0.607609987258911, 0.353887975215912],\n [0.645429015159607, 0.696707010269165],\n [0.932694971561432, 0.730105042457581],\n [0.557260990142822, 0.572826027870178],\n [0.542901992797852, 0.584792017936707],\n [0.6180260181427, 0.694710969924927],\n [0.607590973377228, 0.694203019142151],\n [0.722943007946014, 0.271963000297546],\n [0.577413976192474, 0.563166975975037],\n [0.614082992076874, 0.281386971473694],\n [0.616907000541687, 0.255886018276215],\n [0.668509006500244, 0.119913995265961],\n [0.770092010498047, 0.232020974159241],\n [0.635536015033722, 0.189248979091644],\n [0.77039098739624, 0.299556016921997],\n [0.826722025871277, 0.278755009174347],\n [0.527121007442474, 0.666198015213013],\n [0.553171992301941, 0.668527007102966],\n [0.577238023281097, 0.673889994621277],\n [0.554691970348358, 0.580065965652466],\n [0.611896991729736, 0.693961024284363],\n [0.59696102142334, 0.706539988517761],\n [0.596370995044708, 0.693953037261963],\n [0.539958000183105, 0.557139039039612],\n [0.568841993808746, 0.692366003990173],\n [0.547818005084991, 0.692366003990173],\n [0.52461302280426, 0.692366003990173],\n [0.534089982509613, 0.779141008853912],\n [0.527670979499817, 0.736225962638855],\n [0.526912987232208, 0.717857003211975],\n [0.526877999305725, 0.704625964164734],\n [0.526966989040375, 0.695277988910675],\n [0.572058022022247, 0.695277988910675],\n [0.573521018028259, 0.703539967536926],\n [0.57683801651001, 0.711845993995667],\n [0.581691026687622, 0.720062971115112],\n [0.609944999217987, 0.639909982681274],\n [0.986046016216278, 0.560034036636353],\n [0.5867999792099, 0.69539999961853],\n [0.590372025966644, 0.701822996139526],\n [0.531915009021759, 0.601536989212036],\n [0.577268004417419, 0.585934996604919],\n [0.536915004253387, 0.593786001205444],\n [0.627542972564697, 0.473352015018463],\n [0.665585994720459, 0.495950996875763],\n [0.588353991508484, 0.546862006187439],\n [0.757824003696442, 0.14767599105835],\n [0.709249973297119, 0.201507985591888],\n [0.672684013843536, 0.256581008434296],\n [0.600408971309662, 0.74900496006012],\n [0.55826598405838, 0.261672019958496],\n [0.570303976535797, 0.187870979309082],\n [0.588165998458862, 0.109044015407562],\n [0.711045026779175, 0.398952007293701],\n [0.781069993972778, 0.435405015945435],\n [0.587247014045715, 0.398931980133057],\n [0.742869973182678, 0.355445981025696],\n [0.572156012058258, 0.437651991844177],\n [0.55186802148819, 0.536570012569427],\n 
[0.821442008018494, 0.457556009292603],\n [0.752701997756958, 0.457181990146637],\n [0.71375697851181, 0.467626988887787],\n [0.66711300611496, 0.460672974586487],\n [0.631101012229919, 0.447153985500336],\n [0.6008620262146, 0.432473003864288],\n [0.523481011390686, 0.405627012252808],\n [0.810747981071472, 0.523926019668579],\n [0.771045982837677, 0.348959028720856],\n [0.509127020835876, 0.562718033790588],\n [0.595292985439301, 0.485023975372314],\n [0.980530977249146, 0.401564002037048],\n [0.573499977588654, 0.420000016689301],\n [0.602994978427887, 0.548687994480133],\n [0.733529984951019, 0.376977026462555],\n [0.560611009597778, 0.519016981124878],\n [0.967685997486115, 0.644356966018677],\n [0.580985009670258, 0.387160003185272],\n [0.537728011608124, 0.505385041236877],\n [0.760966002941132, 0.779752969741821],\n [0.801778972148895, 0.831938028335571],\n [0.892440974712372, 0.54076099395752],\n [0.816350996494293, 0.740260004997253],\n [0.865594983100891, 0.333687007427216],\n [0.614073991775513, 0.883246004581451],\n [0.508952975273132, 0.579437971115112],\n [0.617941975593567, 0.508316040039062],\n [0.825608015060425, 0.397674977779388],\n [0.681214988231659, 0.39623498916626],\n [0.656635999679565, 0.400596976280212],\n [0.603900015354156, 0.710216999053955],\n [0.81208598613739, 0.588539004325867],\n [0.56801301240921, 0.944564998149872],\n [0.681007981300354, 0.898285031318665],\n [0.733752012252808, 0.869701027870178],\n [0.633830010890961, 0.398822009563446],\n [0.606792986392975, 0.39553701877594],\n [0.589659988880157, 0.391062021255493],\n [0.805015981197357, 0.342108011245728],\n [0.611334979534149, 0.362284004688263],\n [0.634037971496582, 0.355970978736877],\n [0.656635999679565, 0.355356991291046],\n [0.681214988231659, 0.35834002494812],\n [0.698584973812103, 0.363156020641327],\n [0.941866993904114, 0.319076001644135],\n [0.698584973812103, 0.387449026107788],\n [0.584177017211914, 0.624107003211975],\n [0.554318010807037, 0.566076993942261],\n [0.534153997898102, 0.62064003944397],\n [0.711217999458313, 0.819975018501282],\n [0.664629995822906, 0.852871000766754],\n [0.559099972248077, 0.902631998062134],\n [0.871706008911133, 0.791940987110138],\n [0.591234028339386, 0.373893976211548],\n [0.544341027736664, 0.451583981513977],\n [0.624562978744507, 0.924192011356354],\n [0.88577002286911, 0.615028977394104],\n [0.551338016986847, 0.695277988910675],\n [0.551980018615723, 0.704632043838501],\n [0.552887976169586, 0.715808033943176],\n [0.555167973041534, 0.730794012546539],\n [0.569944024085999, 0.767035007476807],\n [0.593203008174896, 0.685675978660583],\n [0.599261999130249, 0.681069016456604],\n [0.607599973678589, 0.677703022956848],\n [0.631937980651855, 0.663500010967255],\n [0.752032995223999, 0.601315021514893],\n [0.547226011753082, 0.420395016670227],\n [0.563543975353241, 0.359827995300293],\n [0.583841025829315, 0.368713974952698],\n [0.586614012718201, 0.692366003990173],\n [0.771915018558502, 0.683578014373779],\n [0.531597018241882, 0.352482974529266],\n [0.588370978832245, 0.804440975189209],\n [0.52079701423645, 0.442565023899078],\n [0.567984998226166, 0.493479013442993],\n [0.543282985687256, 0.819254994392395],\n [0.655317008495331, 0.745514988899231],\n [0.621008992195129, 0.574018001556396],\n [0.625559985637665, 0.78031200170517],\n [0.680198013782501, 0.570719003677368],\n [0.64276397228241, 0.604337990283966],\n [0.704662978649139, 0.621529996395111],\n [0.552012026309967, 0.862591981887817],\n [0.589071989059448, 0.508637011051178],\n 
[0.685944974422455, 0.775357007980347],\n  [0.645735025405884, 0.812640011310577],\n  [0.675342977046967, 0.703978002071381],\n  [0.810858011245728, 0.646304965019226],\n  [0.72012197971344, 0.714666962623596],\n  [0.866151988506317, 0.682704985141754],\n  [0.663187026977539, 0.644596993923187],\n  [0.570082008838654, 0.466325998306274],\n  [0.544561982154846, 0.548375964164734],\n  [0.562758982181549, 0.558784961700439],\n  [0.531987011432648, 0.530140042304993],\n  [0.585271000862122, 0.335177004337311],\n  [0.622952997684479, 0.32277899980545],\n  [0.655896008014679, 0.320163011550903],\n  [0.687132000923157, 0.322345972061157],\n  [0.716481983661652, 0.333200991153717],\n  [0.758756995201111, 0.382786989212036],\n  [0.897013008594513, 0.468769013881683],\n  [0.732392013072968, 0.424547016620636],\n  [0.70211398601532, 0.433162987232208],\n  [0.66652500629425, 0.433866024017334],\n  [0.633504986763, 0.426087975502014],\n  [0.603875994682312, 0.416586995124817],\n  [0.579657971858978, 0.409945011138916],\n  [0.992439985275269, 0.480777025222778],\n  [0.567192018032074, 0.569419980049133],\n  [0.54136598110199, 0.478899002075195],\n  [0.526564002037048, 0.546118021011353],\n  [0.523913025856018, 0.563830018043518],\n  [0.531529009342194, 0.555056989192963],\n  [0.566035985946655, 0.582329034805298],\n  [0.51631098985672, 0.563053965568542],\n  [0.5174720287323, 0.577877044677734],\n  [0.573594987392426, 0.389806985855103],\n  [0.560697972774506, 0.395331978797913],\n  [0.549755990505219, 0.399751007556915],\n  [0.710287988185883, 0.368252992630005],\n  [0.723330020904541, 0.363372981548309],\n];\n\nexport const TRI468: Array<number> = [\n  127, 34, 139, 11, 0, 37, 232, 231, 120, 72, 37, 39, 128, 121, 47, 232, 121, 128, 104, 69, 67, 175, 171, 148, 157, 154, 155, 118, 50, 101, 73, 39, 40, 9,\n  151, 108, 48, 115, 131, 194, 204, 211, 74, 40, 185, 80, 42, 183, 40, 92, 186, 230, 229, 118, 202, 212, 214, 83, 18, 17, 76, 61, 146, 160, 29, 30, 56,\n  157, 173, 106, 204, 194, 135, 214, 192, 203, 165, 98, 21, 71, 68, 51, 45, 4, 144, 24, 23, 77, 146, 91, 205, 50, 187, 201, 200, 18, 91, 106, 182, 90, 91,\n  181, 85, 84, 17, 206, 203, 36, 148, 171, 140, 92, 40, 39, 193, 189, 244, 159, 158, 28, 247, 246, 161, 236, 3, 196, 54, 68, 104, 193, 168, 8, 117,\n  228, 31, 189, 193, 55, 98, 97, 99, 126, 47, 100, 166, 79, 218, 155, 154, 26, 209, 49, 131, 135, 136, 150, 47, 126, 217, 223, 52, 53, 45, 51, 134, 211,\n  170, 140, 67, 69, 108, 43, 106, 91, 230, 119, 120, 226, 130, 247, 63, 53, 52, 238, 20, 242, 46, 70, 156, 78, 62, 96, 46, 53, 63, 143, 34, 227, 173,\n  155, 133, 123, 117, 111, 44, 125, 19, 236, 134, 51, 216, 206, 205, 154, 153, 22, 39, 37, 167, 200, 201, 208, 36, 142, 100, 57, 212, 202, 20, 60, 99, 28,\n  158, 157, 35, 226, 113, 160, 159, 27, 204, 202, 210, 113, 225, 46, 43, 202, 204, 62, 76, 77, 137, 123, 116, 41, 38, 72, 203, 129, 142, 64, 98, 240, 49,\n  102, 64, 41, 73, 74, 212, 216, 207, 42, 74, 184, 169, 170, 211, 170, 149, 176, 105, 66, 69, 122, 6, 168, 123, 147, 187, 96, 77, 90, 65, 55, 107, 89,\n  90, 180, 101, 100, 120, 63, 105, 104, 93, 137, 227, 15, 86, 85, 129, 102, 49, 14, 87, 86, 55, 8, 9, 100, 47, 121, 145, 23, 22, 88, 89, 179, 6, 122,\n  196, 88, 95, 96, 138, 172, 136, 215, 58, 172, 115, 48, 219, 42, 80, 81, 195, 3, 51, 43, 146, 61, 171, 175, 199, 81, 82, 38, 53, 46, 225, 144, 163, 110,\n  246, 33, 7, 52, 65, 66, 229, 228, 117, 34, 127, 234, 107, 108, 69, 109, 108, 151, 48, 64, 235, 62, 78, 191, 129, 209, 126, 111, 35, 143, 163, 161, 246,\n  117, 123, 50, 222, 65, 52, 19, 125, 141, 221, 55, 65, 3, 195, 197, 25, 7, 33, 220, 237, 44, 70, 71, 
139, 122, 193, 245, 247, 130, 33, 71, 21, 162,\n 153, 158, 159, 170, 169, 150, 188, 174, 196, 216, 186, 92, 144, 160, 161, 2, 97, 167, 141, 125, 241, 164, 167, 37, 72, 38, 12, 145, 159, 160, 38, 82, 13,\n 63, 68, 71, 226, 35, 111, 158, 153, 154, 101, 50, 205, 206, 92, 165, 209, 198, 217, 165, 167, 97, 220, 115, 218, 133, 112, 243, 239, 238, 241, 214,\n 135, 169, 190, 173, 133, 171, 208, 32, 125, 44, 237, 86, 87, 178, 85, 86, 179, 84, 85, 180, 83, 84, 181, 201, 83, 182, 137, 93, 132, 76, 62, 183, 61,\n 76, 184, 57, 61, 185, 212, 57, 186, 214, 207, 187, 34, 143, 156, 79, 239, 237, 123, 137, 177, 44, 1, 4, 201, 194, 32, 64, 102, 129, 213, 215, 138, 59,\n 166, 219, 242, 99, 97, 2, 94, 141, 75, 59, 235, 24, 110, 228, 25, 130, 226, 23, 24, 229, 22, 23, 230, 26, 22, 231, 112, 26, 232, 189, 190, 243, 221, 56,\n 190, 28, 56, 221, 27, 28, 222, 29, 27, 223, 30, 29, 224, 247, 30, 225, 238, 79, 20, 166, 59, 75, 60, 75, 240, 147, 177, 215, 20, 79, 166, 187, 147, 213,\n 112, 233, 244, 233, 128, 245, 128, 114, 188, 114, 217, 174, 131, 115, 220, 217, 198, 236, 198, 131, 134, 177, 132, 58, 143, 35, 124, 110, 163, 7, 228,\n 110, 25, 356, 389, 368, 11, 302, 267, 452, 350, 349, 302, 303, 269, 357, 343, 277, 452, 453, 357, 333, 332, 297, 175, 152, 377, 384, 398, 382, 347,\n 348, 330, 303, 304, 270, 9, 336, 337, 278, 279, 360, 418, 262, 431, 304, 408, 409, 310, 415, 407, 270, 409, 410, 450, 348, 347, 422, 430, 434, 313,\n 314, 17, 306, 307, 375, 387, 388, 260, 286, 414, 398, 335, 406, 418, 364, 367, 416, 423, 358, 327, 251, 284, 298, 281, 5, 4, 373, 374, 253, 307, 320,\n 321, 425, 427, 411, 421, 313, 18, 321, 405, 406, 320, 404, 405, 315, 16, 17, 426, 425, 266, 377, 400, 369, 322, 391, 269, 417, 465, 464, 386, 257, 258,\n 466, 260, 388, 456, 399, 419, 284, 332, 333, 417, 285, 8, 346, 340, 261, 413, 441, 285, 327, 460, 328, 355, 371, 329, 392, 439, 438, 382, 341, 256,\n 429, 420, 360, 364, 394, 379, 277, 343, 437, 443, 444, 283, 275, 440, 363, 431, 262, 369, 297, 338, 337, 273, 375, 321, 450, 451, 349, 446, 342, 467,\n 293, 334, 282, 458, 461, 462, 276, 353, 383, 308, 324, 325, 276, 300, 293, 372, 345, 447, 382, 398, 362, 352, 345, 340, 274, 1, 19, 456, 248, 281, 436,\n 427, 425, 381, 256, 252, 269, 391, 393, 200, 199, 428, 266, 330, 329, 287, 273, 422, 250, 462, 328, 258, 286, 384, 265, 353, 342, 387, 259, 257, 424,\n 431, 430, 342, 353, 276, 273, 335, 424, 292, 325, 307, 366, 447, 345, 271, 303, 302, 423, 266, 371, 294, 455, 460, 279, 278, 294, 271, 272, 304, 432,\n 434, 427, 272, 407, 408, 394, 430, 431, 395, 369, 400, 334, 333, 299, 351, 417, 168, 352, 280, 411, 325, 319, 320, 295, 296, 336, 319, 403, 404, 330,\n 348, 349, 293, 298, 333, 323, 454, 447, 15, 16, 315, 358, 429, 279, 14, 15, 316, 285, 336, 9, 329, 349, 350, 374, 380, 252, 318, 402, 403, 6, 197, 419,\n 318, 319, 325, 367, 364, 365, 435, 367, 397, 344, 438, 439, 272, 271, 311, 195, 5, 281, 273, 287, 291, 396, 428, 199, 311, 271, 268, 283, 444, 445,\n 373, 254, 339, 263, 466, 249, 282, 334, 296, 449, 347, 346, 264, 447, 454, 336, 296, 299, 338, 10, 151, 278, 439, 455, 292, 407, 415, 358, 371, 355,\n 340, 345, 372, 390, 249, 466, 346, 347, 280, 442, 443, 282, 19, 94, 370, 441, 442, 295, 248, 419, 197, 263, 255, 359, 440, 275, 274, 300, 383, 368,\n 351, 412, 465, 263, 467, 466, 301, 368, 389, 380, 374, 386, 395, 378, 379, 412, 351, 419, 436, 426, 322, 373, 390, 388, 2, 164, 393, 370, 462, 461,\n 164, 0, 267, 302, 11, 12, 374, 373, 387, 268, 12, 13, 293, 300, 301, 446, 261, 340, 385, 384, 381, 330, 266, 425, 426, 423, 391, 429, 355, 437, 391,\n 327, 
326, 440, 457, 438, 341, 382, 362, 459, 457, 461, 434, 430, 394, 414, 463, 362, 396, 369, 262, 354, 461, 457, 316, 403, 402, 315, 404, 403, 314,\n 405, 404, 313, 406, 405, 421, 418, 406, 366, 401, 361, 306, 408, 407, 291, 409, 408, 287, 410, 409, 432, 436, 410, 434, 416, 411, 264, 368, 383, 309,\n 438, 457, 352, 376, 401, 274, 275, 4, 421, 428, 262, 294, 327, 358, 433, 416, 367, 289, 455, 439, 462, 370, 326, 2, 326, 370, 305, 460, 455, 254,\n 449, 448, 255, 261, 446, 253, 450, 449, 252, 451, 450, 256, 452, 451, 341, 453, 452, 413, 464, 463, 441, 413, 414, 258, 442, 441, 257, 443, 442, 259,\n 444, 443, 260, 445, 444, 467, 342, 445, 459, 458, 250, 289, 392, 290, 290, 328, 460, 376, 433, 435, 250, 290, 392, 411, 416, 433, 341, 463, 464, 453,\n 464, 465, 357, 465, 412, 343, 412, 399, 360, 363, 440, 437, 399, 456, 420, 456, 363, 401, 435, 288, 372, 383, 353, 339, 255, 249, 448, 261, 255, 133,\n 243, 190, 133, 155, 112, 33, 246, 247, 33, 130, 25, 398, 384, 286, 362, 398, 414, 362, 463, 341, 263, 359, 467, 263, 249, 255, 466, 467, 260, 75, 60,\n 166, 238, 239, 79, 162, 127, 139, 72, 11, 37, 121, 232, 120, 73, 72, 39, 114, 128, 47, 233, 232, 128, 103, 104, 67, 152, 175, 148, 173, 157, 155,\n 119, 118, 101, 74, 73, 40, 107, 9, 108, 49, 48, 131, 32, 194, 211, 184, 74, 185, 191, 80, 183, 185, 40, 186, 119, 230, 118, 210, 202, 214, 84, 83, 17,\n 77, 76, 146, 161, 160, 30, 190, 56, 173, 182, 106, 194, 138, 135, 192, 129, 203, 98, 54, 21, 68, 5, 51, 4, 145, 144, 23, 90, 77, 91, 207, 205, 187, 83,\n 201, 18, 181, 91, 182, 180, 90, 181, 16, 85, 17, 205, 206, 36, 176, 148, 140, 165, 92, 39, 245, 193, 244, 27, 159, 28, 30, 247, 161, 174, 236, 196,\n 103, 54, 104, 55, 193, 8, 111, 117, 31, 221, 189, 55, 240, 98, 99, 142, 126, 100, 219, 166, 218, 112, 155, 26, 198, 209, 131, 169, 135, 150, 114, 47,\n 217, 224, 223, 53, 220, 45, 134, 32, 211, 140, 109, 67, 108, 146, 43, 91, 231, 230, 120, 113, 226, 247, 105, 63, 52, 241, 238, 242, 124, 46, 156, 95,\n 78, 96, 70, 46, 63, 116, 143, 227, 116, 123, 111, 1, 44, 19, 3, 236, 51, 207, 216, 205, 26, 154, 22, 165, 39, 167, 199, 200, 208, 101, 36, 100, 43,\n 57, 202, 242, 20, 99, 56, 28, 157, 124, 35, 113, 29, 160, 27, 211, 204, 210, 124, 113, 46, 106, 43, 204, 96, 62, 77, 227, 137, 116, 73, 41, 72, 36, 203,\n 142, 235, 64, 240, 48, 49, 64, 42, 41, 74, 214, 212, 207, 183, 42, 184, 210, 169, 211, 140, 170, 176, 104, 105, 69, 193, 122, 168, 50, 123, 187, 89, 96,\n 90, 66, 65, 107, 179, 89, 180, 119, 101, 120, 68, 63, 104, 234, 93, 227, 16, 15, 85, 209, 129, 49, 15, 14, 86, 107, 55, 9, 120, 100, 121, 153, 145, 22,\n 178, 88, 179, 197, 6, 196, 89, 88, 96, 135, 138, 136, 138, 215, 172, 218, 115, 219, 41, 42, 81, 5, 195, 51, 57, 43, 61, 208, 171, 199, 41, 81, 38,\n 224, 53, 225, 24, 144, 110, 105, 52, 66, 118, 229, 117, 227, 34, 234, 66, 107, 69, 10, 109, 151, 219, 48, 235, 183, 62, 191, 142, 129, 126, 116, 111,\n 143, 7, 163, 246, 118, 117, 50, 223, 222, 52, 94, 19, 141, 222, 221, 65, 196, 3, 197, 45, 220, 44, 156, 70, 139, 188, 122, 245, 139, 71, 162, 145,\n 153, 159, 149, 170, 150, 122, 188, 196, 206, 216, 92, 163, 144, 161, 164, 2, 167, 242, 141, 241, 0, 164, 37, 11, 72, 12, 144, 145, 160, 12, 38, 13, 70,\n 63, 71, 31, 226, 111, 157, 158, 154, 36, 101, 205, 203, 206, 165, 126, 209, 217, 98, 165, 97, 237, 220, 218, 237, 239, 241, 210, 214, 169, 140, 171, 32,\n 241, 125, 237, 179, 86, 178, 180, 85, 179, 181, 84, 180, 182, 83, 181, 194, 201, 182, 177, 137, 132, 184, 76, 183, 185, 61, 184, 186, 57, 185, 216, 212,\n 186, 192, 214, 187, 139, 34, 156, 218, 79, 237, 147, 123, 177, 
45, 44, 4, 208, 201, 32, 98, 64, 129, 192, 213, 138, 235, 59, 219, 141, 242, 97, 97, 2,\n 141, 240, 75, 235, 229, 24, 228, 31, 25, 226, 230, 23, 229, 231, 22, 230, 232, 26, 231, 233, 112, 232, 244, 189, 243, 189, 221, 190, 222, 28, 221,\n 223, 27, 222, 224, 29, 223, 225, 30, 224, 113, 247, 225, 99, 60, 240, 213, 147, 215, 60, 20, 166, 192, 187, 213, 243, 112, 244, 244, 233, 245, 245,\n 128, 188, 188, 114, 174, 134, 131, 220, 174, 217, 236, 236, 198, 134, 215, 177, 58, 156, 143, 124, 25, 110, 7, 31, 228, 25, 264, 356, 368, 0, 11, 267,\n 451, 452, 349, 267, 302, 269, 350, 357, 277, 350, 452, 357, 299, 333, 297, 396, 175, 377, 381, 384, 382, 280, 347, 330, 269, 303, 270, 151, 9, 337,\n 344, 278, 360, 424, 418, 431, 270, 304, 409, 272, 310, 407, 322, 270, 410, 449, 450, 347, 432, 422, 434, 18, 313, 17, 291, 306, 375, 259, 387, 260,\n 424, 335, 418, 434, 364, 416, 391, 423, 327, 301, 251, 298, 275, 281, 4, 254, 373, 253, 375, 307, 321, 280, 425, 411, 200, 421, 18, 335, 321, 406,\n 321, 320, 405, 314, 315, 17, 423, 426, 266, 396, 377, 369, 270, 322, 269, 413, 417, 464, 385, 386, 258, 248, 456, 419, 298, 284, 333, 168, 417, 8,\n 448, 346, 261, 417, 413, 285, 326, 327, 328, 277, 355, 329, 309, 392, 438, 381, 382, 256, 279, 429, 360, 365, 364, 379, 355, 277, 437, 282, 443, 283,\n 281, 275, 363, 395, 431, 369, 299, 297, 337, 335, 273, 321, 348, 450, 349, 359, 446, 467, 283, 293, 282, 250, 458, 462, 300, 276, 383, 292, 308, 325,\n 283, 276, 293, 264, 372, 447, 346, 352, 340, 354, 274, 19, 363, 456, 281, 426, 436, 425, 380, 381, 252, 267, 269, 393, 421, 200, 428, 371, 266, 329,\n 432, 287, 422, 290, 250, 328, 385, 258, 384, 446, 265, 342, 386, 387, 257, 422, 424, 430, 445, 342, 276, 422, 273, 424, 306, 292, 307, 352, 366, 345,\n 268, 271, 302, 358, 423, 371, 327, 294, 460, 331, 279, 294, 303, 271, 304, 436, 432, 427, 304, 272, 408, 395, 394, 431, 378, 395, 400, 296, 334, 299,\n 6, 351, 168, 376, 352, 411, 307, 325, 320, 285, 295, 336, 320, 319, 404, 329, 330, 349, 334, 293, 333, 366, 323, 447, 316, 15, 315, 331, 358, 279,\n 317, 14, 316, 8, 285, 9, 277, 329, 350, 253, 374, 252, 319, 318, 403, 351, 6, 419, 324, 318, 325, 397, 367, 365, 288, 435, 397, 278, 344, 439, 310,\n 272, 311, 248, 195, 281, 375, 273, 291, 175, 396, 199, 312, 311, 268, 276, 283, 445, 390, 373, 339, 295, 282, 296, 448, 449, 346, 356, 264, 454, 337,\n 336, 299, 337, 338, 151, 294, 278, 455, 308, 292, 415, 429, 358, 355, 265, 340, 372, 388, 390, 466, 352, 346, 280, 295, 442, 282, 354, 19, 370, 285,\n 441, 295, 195, 248, 197, 457, 440, 274, 301, 300, 368, 417, 351, 465, 251, 301, 389, 385, 380, 386, 394, 395, 379, 399, 412, 419, 410, 436, 322, 387,\n 373, 388, 326, 2, 393, 354, 370, 461, 393, 164, 267, 268, 302, 12, 386, 374, 387, 312, 268, 13, 298, 293, 301, 265, 446, 340, 380, 385, 381, 280, 330,\n 425, 322, 426, 391, 420, 429, 437, 393, 391, 326, 344, 440, 438, 458, 459, 461, 364, 434, 394, 428, 396, 262, 274, 354, 457, 317, 316, 402, 316, 315,\n 403, 315, 314, 404, 314, 313, 405, 313, 421, 406, 323, 366, 361, 292, 306, 407, 306, 291, 408, 291, 287, 409, 287, 432, 410, 427, 434, 411, 372, 264,\n 383, 459, 309, 457, 366, 352, 401, 1, 274, 4, 418, 421, 262, 331, 294, 358, 435, 433, 367, 392, 289, 439, 328, 462, 326, 94, 2, 370, 289, 305, 455, 339,\n 254, 448, 359, 255, 446, 254, 253, 449, 253, 252, 450, 252, 256, 451, 256, 341, 452, 414, 413, 463, 286, 441, 414, 286, 258, 441, 258, 257, 442, 257,\n 259, 443, 259, 260, 444, 260, 467, 445, 309, 459, 250, 305, 289, 290, 305, 290, 460, 401, 376, 435, 309, 250, 392, 376, 411, 433, 453, 
341, 464, 357,\n  453, 465, 343, 357, 412, 437, 343, 399, 344, 360, 440, 420, 437, 456, 360, 420, 363, 361, 401, 288, 265, 372, 353, 390, 339, 249, 339, 448, 255];\n\nexport const TRI68: Array<number> = [0, 1, 36, 0, 36, 17, 1, 2, 41, 1, 41, 36, 2, 3, 31, 2, 31, 41, 3, 4, 48, 3, 48, 31, 4, 5, 48, 5, 6, 48, 6, 7, 59, 6, 59, 48, 7, 8, 58, 7, 58, 59,\n  8, 9, 56, 8, 56, 57, 8, 57, 58, 9, 10, 55, 9, 55, 56, 10, 11, 54, 10, 54, 55, 11, 12, 54, 12, 13, 54, 13, 14, 35, 13, 35, 54, 14, 15, 46, 14, 46, 35, 15, 16,\n  45, 15, 45, 46, 16, 26, 45, 17, 36, 18, 18, 37, 19, 18, 36, 37, 19, 38, 20, 19, 37, 38, 20, 39, 21, 20, 38, 39, 21, 39, 27, 22, 42, 23, 22, 27, 42, 23, 43, 24,\n  23, 42, 43, 24, 44, 25, 24, 43, 44, 25, 45, 26, 25, 44, 45, 27, 39, 28, 27, 28, 42, 28, 39, 29, 28, 29, 42, 29, 31, 30, 29, 30, 35, 29, 40, 31, 29, 35, 47, 29,\n  39, 40, 29, 47, 42, 30, 31, 32, 30, 32, 33, 30, 33, 34, 30, 34, 35, 31, 50, 32, 31, 40, 41, 31, 48, 49, 31, 49, 50, 32, 51, 33, 32, 50, 51, 33, 51, 34, 34, 52,\n  35, 34, 51, 52, 35, 46, 47, 35, 52, 53, 35, 53, 54, 36, 41, 37, 37, 40, 38, 37, 41, 40, 38, 40, 39, 42, 47, 43, 43, 47, 44, 44, 46, 45, 44, 47, 46, 48, 60, 49,\n  48, 59, 60, 49, 61, 50, 49, 60, 61, 50, 62, 51, 50, 61, 62, 51, 62, 52, 52, 63, 53, 52, 62, 63, 53, 64, 54, 53, 63, 64, 54, 64, 55, 55, 65, 56, 55, 64, 65, 56,\n  66, 57, 56, 65, 66, 57, 66, 58, 58, 67, 59, 58, 66, 67, 59, 67, 60, 60, 67, 61, 61, 66, 62, 61, 67, 66, 62, 66, 63, 63, 65, 64, 63, 66, 65, 21, 27, 22];\n\nexport const TRI33: Array<number> = [\n  /* eyes */ 0, 8, 7, 7, 8, 1, 2, 10, 9, 9, 10, 3,\n  /* brows */ 17, 0, 18, 18, 0, 7, 18, 7, 19, 19, 7, 1, 19, 1, 11, 19, 11, 20, 21, 3, 22, 21, 9, 3, 20, 9, 21, 20, 2, 9, 20, 11, 2,\n  /* 4head */ 23, 17, 18, 25, 21, 22, 24, 19, 20, 24, 18, 19, 24, 20, 21, 24, 23, 18, 24, 21, 25,\n  /* nose */ 11, 12, 4, 11, 4, 13, 1, 12, 11, 11, 13, 2, 12, 14, 4, 4, 14, 13,\n  /* up-lip */ 14, 5, 15, 14, 15, 6, 12, 5, 14, 14, 6, 13,\n  /* cheeks */ 8, 12, 1, 2, 13, 10, 8, 26, 12, 10, 13, 27, 26, 5, 12, 13, 6, 27, 0, 26, 8, 10, 27, 3,\n  /* chin */ 5, 32, 16, 16, 32, 6, 5, 30, 32, 6, 32, 31,\n  /* cont */ 26, 30, 5, 27, 6, 31, 0, 28, 26, 3, 27, 29, 17, 28, 0, 3, 29, 22, 23, 28, 17, 22, 29, 25, 28, 30, 26, 27, 31, 29,\n];\n\nexport const TRI7: Array<number> = [0, 4, 1, 2, 4, 3, 4, 5, 6];\n\nexport const VTX68: Array<number> = [\n  /* cont */ 127, 234, 132, 58, 172, 150, 149, 148, 152, 377, 378, 379, 397, 288, 361, 454, 356,\n  /* brows */ 70, 63, 105, 66, 107, 336, 296, 334, 293, 300,\n  /* nose */ 168, 6, 195, 4, 98, 97, 2, 326, 327,\n  /* eyes */ 33, 160, 158, 133, 153, 144, 362, 385, 387, 263, 373, 380,\n  /* lip */ 57, 40, 37, 0, 267, 270, 287, 321, 314, 17, 84, 91,\n  /* mouth */ 78, 81, 13, 311, 308, 402, 14, 178,\n];\n\nexport const VTX33: Array<number> = [33, 133, 362, 263, 1, 62, 308, 159, 145, 386, 374, 6, 102, 331, 2, 13, 14, 70, 105, 107, 336, 334, 300, 54, 10, 284, 50, 280, 234, 454, 58, 288, 152];\n\nexport const VTX7: Array<number> = [33, 133, 362, 263, 1, 78, 308];\n\nexport const UV68 = VTX68.map((x) => UV468[x]);\n\nexport const UV33 = VTX33.map((x) => UV468[x]);\n\nexport const UV7 = VTX7.map((x) => UV468[x]);\n", "/**\n * BlazeFace, FaceMesh & Iris model implementation\n * See `facemesh.ts` for entry point\n */\n\nimport * as tf from '../../dist/tfjs.esm.js';\nimport * as coords from './facemeshcoords';\nimport type { Box, Point } from '../result';\n\nexport const createBox = (startEndTensor) => ({ startPoint: tf.slice(startEndTensor, [0, 0], [-1, 2]), endPoint: tf.slice(startEndTensor, [0, 2], [-1, 2]) });\n\nexport const disposeBox = (t) => tf.dispose([t.startPoint, t.endPoint]);\n\n
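// the helpers below operate on plain boxes of shape { startPoint: [x, y], endPoint: [x, y] };\n// worked example (hypothetical values): for startPoint [10, 20] and endPoint [110, 220],\n// getBoxSize returns [100, 200] and getBoxCenter returns [60, 120]\n\n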
export const getBoxSize = (box): [number, number] => [Math.abs(box.endPoint[0] - box.startPoint[0]), Math.abs(box.endPoint[1] - box.startPoint[1])];\n\nexport const getBoxCenter = (box): [number, number] => [box.startPoint[0] + (box.endPoint[0] - box.startPoint[0]) / 2, box.startPoint[1] + (box.endPoint[1] - box.startPoint[1]) / 2];\n\nexport const getClampedBox = (box, input): Box => (box ? [\n  Math.trunc(Math.max(0, box.startPoint[0])),\n  Math.trunc(Math.max(0, box.startPoint[1])),\n  Math.trunc(Math.min((input.shape[2] || 0), box.endPoint[0]) - Math.max(0, box.startPoint[0])),\n  Math.trunc(Math.min((input.shape[1] || 0), box.endPoint[1]) - Math.max(0, box.startPoint[1])),\n] : [0, 0, 0, 0]);\n\nexport const getRawBox = (box, input): Box => (box ? [\n  box.startPoint[0] / (input.shape[2] || 0),\n  box.startPoint[1] / (input.shape[1] || 0),\n  (box.endPoint[0] - box.startPoint[0]) / (input.shape[2] || 0),\n  (box.endPoint[1] - box.startPoint[1]) / (input.shape[1] || 0),\n] : [0, 0, 0, 0]);\n\nexport const scaleBoxCoordinates = (box, factor) => {\n  const startPoint = [box.startPoint[0] * factor[0], box.startPoint[1] * factor[1]];\n  const endPoint = [box.endPoint[0] * factor[0], box.endPoint[1] * factor[1]];\n  return { startPoint, endPoint, landmarks: box.landmarks, confidence: box.confidence };\n};\n\nexport const cutBoxFromImageAndResize = (box, image, cropSize) => {\n  const h = image.shape[1];\n  const w = image.shape[2];\n  const crop = tf.image.cropAndResize(image, [[box.startPoint[1] / h, box.startPoint[0] / w, box.endPoint[1] / h, box.endPoint[0] / w]], [0], cropSize);\n  const norm = tf.div(crop, 255);\n  tf.dispose(crop);\n  return norm;\n};\n\nexport const enlargeBox = (box, factor) => {\n  const center = getBoxCenter(box);\n  const size = getBoxSize(box);\n  const halfSize: [number, number] = [factor * size[0] / 2, factor * size[1] / 2];\n  return { startPoint: [center[0] - halfSize[0], center[1] - halfSize[1]] as Point, endPoint: [center[0] + halfSize[0], center[1] + halfSize[1]] as Point, landmarks: box.landmarks, confidence: box.confidence };\n};\n\nexport const squarifyBox = (box) => {\n  const centers = getBoxCenter(box);\n  const size = getBoxSize(box);\n  const halfSize = Math.max(...size) / 2;\n  return { startPoint: [Math.round(centers[0] - halfSize), Math.round(centers[1] - halfSize)] as Point, endPoint: [Math.round(centers[0] + halfSize), Math.round(centers[1] + halfSize)] as Point, landmarks: box.landmarks, confidence: box.confidence };\n};\n\nexport const calculateLandmarksBoundingBox = (landmarks) => {\n  const xs = landmarks.map((d) => d[0]);\n  const ys = landmarks.map((d) => d[1]);\n  return { startPoint: [Math.min(...xs), Math.min(...ys)] as Point, endPoint: [Math.max(...xs), Math.max(...ys)] as Point, landmarks };\n};\n\nexport const fixedRotationMatrix = [[1, 0, 0], [0, 1, 0], [0, 0, 1]];\n\nexport const normalizeRadians = (angle) => angle - 2 * Math.PI * Math.floor((angle + Math.PI) / (2 * Math.PI));\n\nexport const computeRotation = (point1, point2) => normalizeRadians(Math.PI / 2 - Math.atan2(-(point2[1] - point1[1]), point2[0] - point1[0]));\n\nexport const radToDegrees = (rad) => rad * 180 / Math.PI;\n\nexport const buildTranslationMatrix = (x, y) => [[1, 0, x], [0, 1, y], [0, 0, 1]];\n\nexport const dot = (v1: number[], v2: number[]) => {\n  let product = 0;\n  for (let i = 0; i < v1.length; i++) product += v1[i] * v2[i];\n  return product;\n};\n\nexport const getColumnFrom2DArr = (arr, columnIndex) => {\n  const column: Array<number> = [];\n  for (let i = 0; i < arr.length; i++) column.push(arr[i][columnIndex]);\n  return column;\n};\n\n
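// the matrix helpers below compose rotation about an arbitrary center c as T(c) * R(angle) * T(-c)\n// in 3x3 homogeneous coordinates; sketch (hypothetical values): buildRotationMatrix(Math.PI / 2, [0, 0])\n// reduces (up to floating-point error) to the pure rotation [[0, -1, 0], [1, 0, 0], [0, 0, 1]]\n\n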
export const multiplyTransformMatrices = (mat1, mat2) => {\n  const product: Array<number[]> = [];\n  const size = mat1.length;\n  for (let row = 0; row < size; row++) {\n    product.push([]);\n    for (let col = 0; col < size; col++) product[row].push(dot(mat1[row], getColumnFrom2DArr(mat2, col)));\n  }\n  return product;\n};\n\nexport const buildRotationMatrix = (rotation, center) => {\n  const cosA = Math.cos(rotation);\n  const sinA = Math.sin(rotation);\n  const rotationMatrix = [[cosA, -sinA, 0], [sinA, cosA, 0], [0, 0, 1]];\n  const translationMatrix = buildTranslationMatrix(center[0], center[1]);\n  const translationTimesRotation = multiplyTransformMatrices(translationMatrix, rotationMatrix);\n  const negativeTranslationMatrix = buildTranslationMatrix(-center[0], -center[1]);\n  return multiplyTransformMatrices(translationTimesRotation, negativeTranslationMatrix);\n};\n\nexport const invertTransformMatrix = (matrix) => {\n  const rotationComponent = [[matrix[0][0], matrix[1][0]], [matrix[0][1], matrix[1][1]]];\n  const translationComponent = [matrix[0][2], matrix[1][2]];\n  const invertedTranslation = [-dot(rotationComponent[0], translationComponent), -dot(rotationComponent[1], translationComponent)];\n  return [rotationComponent[0].concat(invertedTranslation[0]), rotationComponent[1].concat(invertedTranslation[1]), [0, 0, 1]];\n};\n\nexport const rotatePoint = (homogeneousCoordinate, rotationMatrix) => [dot(homogeneousCoordinate, rotationMatrix[0]), dot(homogeneousCoordinate, rotationMatrix[1])];\n\nexport const xyDistanceBetweenPoints = (a, b) => Math.sqrt(((a[0] - b[0]) ** 2) + ((a[1] - b[1]) ** 2));\n\nexport function generateAnchors(inputSize) {\n  const spec = { strides: [inputSize / 16, inputSize / 8], anchors: [2, 6] };\n  const anchors: Array<[number, number]> = [];\n  for (let i = 0; i < spec.strides.length; i++) {\n    const stride = spec.strides[i];\n    const gridRows = Math.floor((inputSize + stride - 1) / stride);\n    const gridCols = Math.floor((inputSize + stride - 1) / stride);\n    const anchorsNum = spec.anchors[i];\n    for (let gridY = 0; gridY < gridRows; gridY++) {\n      const anchorY = stride * (gridY + 0.5);\n      for (let gridX = 0; gridX < gridCols; gridX++) {\n        const anchorX = stride * (gridX + 0.5);\n        for (let n = 0; n < anchorsNum; n++) anchors.push([anchorX, anchorY]);\n      }\n    }\n  }\n  return anchors;\n}\n\n
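// anchor layout sanity check (worked example, not executed): for inputSize 128 the strides are [8, 16]\n// with [2, 6] anchors per cell, so generateAnchors yields 16*16*2 + 8*8*6 = 512 + 384 = 896 anchor points,\n// matching the 896 detection rows emitted by the BlazeFace detector\n\n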
export function transformRawCoords(coordsRaw, box, angle, rotationMatrix, inputSize) {\n  const boxSize = getBoxSize(box);\n  const coordsScaled = coordsRaw.map((coord) => ([ // scaled around zero-point\n    boxSize[0] / inputSize * (coord[0] - inputSize / 2),\n    boxSize[1] / inputSize * (coord[1] - inputSize / 2),\n    coord[2] || 0,\n  ]));\n  const largeAngle = angle && (angle !== 0) && (Math.abs(angle) > 0.2);\n  const coordsRotationMatrix = largeAngle ? buildRotationMatrix(angle, [0, 0]) : fixedRotationMatrix;\n  const coordsRotated = largeAngle ? coordsScaled.map((coord) => ([...rotatePoint(coord, coordsRotationMatrix), coord[2]])) : coordsScaled;\n  const inverseRotationMatrix = largeAngle ? invertTransformMatrix(rotationMatrix) : fixedRotationMatrix;\n  const boxCenter = [...getBoxCenter({ startPoint: box.startPoint, endPoint: box.endPoint }), 1];\n  return coordsRotated.map((coord) => ([\n    Math.round(coord[0] + dot(boxCenter, inverseRotationMatrix[0])),\n    Math.round(coord[1] + dot(boxCenter, inverseRotationMatrix[1])),\n    Math.round(coord[2] || 0),\n  ]));\n}\n\n
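// correctFaceRotation below only counter-rotates the input when the face symmetry line is tilted by\n// more than 0.2 rad (~11.5 deg); smaller tilts are passed through unchanged, which also avoids calling\n// tf.image.rotateWithOffset (not implemented for the tfjs-node backend) on near-upright faces\n\n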
export function correctFaceRotation(box, input, inputSize) {\n  const symmetryLine = (box.landmarks.length >= coords.meshLandmarks.count) ? coords.meshLandmarks.symmetryLine : coords.blazeFaceLandmarks.symmetryLine;\n  const angle: number = computeRotation(box.landmarks[symmetryLine[0]], box.landmarks[symmetryLine[1]]);\n  const largeAngle = angle && (angle !== 0) && (Math.abs(angle) > 0.2);\n  let rotationMatrix;\n  let face;\n  if (largeAngle) {\n    const faceCenter: Point = getBoxCenter({ startPoint: box.startPoint, endPoint: box.endPoint });\n    const faceCenterNormalized: Point = [faceCenter[0] / input.shape[2], faceCenter[1] / input.shape[1]];\n    const rotated = tf.image.rotateWithOffset(input, angle, 0, faceCenterNormalized); // rotateWithOffset is not defined for tfjs-node\n    rotationMatrix = buildRotationMatrix(-angle, faceCenter);\n    face = cutBoxFromImageAndResize(box, rotated, [inputSize, inputSize]);\n    tf.dispose(rotated);\n  } else {\n    rotationMatrix = fixedRotationMatrix;\n    face = cutBoxFromImageAndResize(box, input, [inputSize, inputSize]);\n  }\n  return [angle, rotationMatrix, face];\n}\n", "/**\n * BlazeFace, FaceMesh & Iris model implementation\n * See `facemesh.ts` for entry point\n */\n\nimport { log, join } from '../util/util';\nimport * as tf from '../../dist/tfjs.esm.js';\nimport * as util from './facemeshutil';\nimport type { Config } from '../config';\nimport type { Tensor, GraphModel } from '../tfjs/types';\nimport { env } from '../util/env';\nimport type { Point } from '../result';\n\nconst keypointsCount = 6;\nlet model: GraphModel | null;\nlet anchorsData: [number, number][] = [];\nlet anchors: Tensor | null = null;\nlet inputSize = 0;\n\n// export const size = () => (model && model.inputs[0].shape ? model.inputs[0].shape[2] : 0);\nexport const size = () => inputSize;\n\nexport async function load(config: Config): Promise<GraphModel> {\n  if (env.initial) model = null;\n  if (!model) {\n    model = await tf.loadGraphModel(join(config.modelBasePath, config.face.detector?.modelPath || '')) as unknown as GraphModel;\n    if (!model || !model['modelUrl']) log('load model failed:', config.face.detector?.modelPath);\n    else if (config.debug) log('load model:', model['modelUrl']);\n  } else if (config.debug) log('cached model:', model['modelUrl']);\n  inputSize = model.inputs[0].shape ? model.inputs[0].shape[2] : 0;\n  if (inputSize === -1) inputSize = 64;\n  anchorsData = util.generateAnchors(inputSize);\n  anchors = tf.tensor2d(anchorsData);\n  return model;\n}\n\nfunction decodeBounds(boxOutputs) {\n  const t: Record<string, Tensor> = {};\n  t.boxStarts = tf.slice(boxOutputs, [0, 1], [-1, 2]);\n  t.centers = tf.add(t.boxStarts, anchors);\n  t.boxSizes = tf.slice(boxOutputs, [0, 3], [-1, 2]);\n  t.boxSizesNormalized = tf.div(t.boxSizes, inputSize);\n  t.centersNormalized = tf.div(t.centers, inputSize);\n  t.halfBoxSize = tf.div(t.boxSizesNormalized, 2);\n  t.starts = tf.sub(t.centersNormalized, t.halfBoxSize);\n  t.ends = tf.add(t.centersNormalized, t.halfBoxSize);\n  t.startNormalized = tf.mul(t.starts, inputSize);\n  t.endNormalized = tf.mul(t.ends, inputSize);\n  const boxes = tf.concat2d([t.startNormalized, t.endNormalized], 1);\n  Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));\n  return boxes;\n}\n\n
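// decodeBounds above converts each raw detection row [score, dx, dy, w, h, ...] to corner form:\n// center = anchor + [dx, dy], then start/end = center -/+ [w, h] / 2; worked example (hypothetical\n// values): anchor [64, 64] with offset [4, -2] and size [40, 40] decodes to startPoint [48, 42]\n// and endPoint [88, 82]\n\n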
export async function getBoxes(inputImage: Tensor, config: Config) {\n  // sanity check on input\n  if ((!inputImage) || (inputImage['isDisposedInternal']) || (inputImage.shape.length !== 4) || (inputImage.shape[1] < 1) || (inputImage.shape[2] < 1)) return { boxes: [] };\n  const t: Record<string, Tensor> = {};\n\n  t.resized = tf.image.resizeBilinear(inputImage, [inputSize, inputSize]);\n  t.div = tf.div(t.resized, 127.5);\n  t.normalized = tf.sub(t.div, 0.5);\n  const res = model?.execute(t.normalized) as Tensor[];\n  if (Array.isArray(res)) { // are we using tfhub or pinto converted model?\n    const sorted = res.sort((a, b) => a.size - b.size);\n    t.concat384 = tf.concat([sorted[0], sorted[2]], 2); // dim: 384, 1 + 16\n    t.concat512 = tf.concat([sorted[1], sorted[3]], 2); // dim: 512, 1 + 16\n    t.concat = tf.concat([t.concat512, t.concat384], 1);\n    t.batch = tf.squeeze(t.concat, 0);\n  } else {\n    t.batch = tf.squeeze(res); // when using tfhub model\n  }\n  tf.dispose(res);\n  t.boxes = decodeBounds(t.batch);\n  t.logits = tf.slice(t.batch, [0, 0], [-1, 1]);\n  t.sigmoid = tf.sigmoid(t.logits);\n  t.scores = tf.squeeze(t.sigmoid);\n\n  t.nms = await tf.image.nonMaxSuppressionAsync(t.boxes, t.scores, (config.face.detector?.maxDetected || 0), (config.face.detector?.iouThreshold || 0), (config.face.detector?.minConfidence || 0));\n  const nms = await t.nms.array() as number[];\n  const boxes: Array<{ box: { startPoint: Point, endPoint: Point }, landmarks: Point[], confidence: number }> = [];\n  const scores = await t.scores.data();\n  for (let i = 0; i < nms.length; i++) {\n    const confidence = scores[nms[i]];\n    if (confidence > (config.face.detector?.minConfidence || 0)) {\n      const b: Record<string, Tensor> = {};\n      b.bbox = tf.slice(t.boxes, [nms[i], 0], [1, -1]);\n      b.slice = tf.slice(t.batch, [nms[i], keypointsCount - 1], [1, -1]);\n      b.squeeze = tf.squeeze(b.slice);\n      b.landmarks = tf.reshape(b.squeeze, [keypointsCount, -1]);\n      b.startPoint = tf.slice(b.bbox, [0, 0], [-1, 2]);\n      b.endPoint = tf.slice(b.bbox, [0, 2], [-1, 2]);\n      boxes.push({\n        box: {\n          startPoint: (await b.startPoint.data()) as unknown as Point,\n          endPoint: (await b.endPoint.data()) as unknown as Point,\n        },\n        landmarks: (await b.landmarks.array()) as Point[],\n        confidence,\n      });\n      Object.keys(b).forEach((tensor) => tf.dispose(b[tensor]));\n    }\n  }\n\n  Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));\n  return { boxes, scaleFactor: [inputImage.shape[2] / inputSize, inputImage.shape[1] / inputSize] };\n}\n", "/* eslint-disable no-multi-spaces */\n\nexport const kpt: Array<string> = [\n  'nose',             // 0\n  'leftEyeInside',    // 1\n  'leftEye',          // 2\n  'leftEyeOutside',   // 3\n  'rightEyeInside',   // 4\n  'rightEye',         // 5\n  'rightEyeOutside',  // 6\n  'leftEar',          // 7\n  'rightEar',         // 8\n  'leftMouth',        // 9\n  'rightMouth',       // 10\n  'leftShoulder',     // 11\n  'rightShoulder',    // 12\n  'leftElbow',        // 13\n  'rightElbow',       // 14\n  'leftWrist',        // 15\n  'rightWrist',       // 16\n  'leftPalm',         // 17\n  'rightPalm',        // 18\n  'leftIndex',        // 19\n  'rightIndex',       // 20\n  'leftPinky',        // 21\n  'rightPinky',       // 22\n  'leftHip',          // 23\n  'rightHip',         // 24\n  'leftKnee',         // 25\n  'rightKnee',        // 26\n  'leftAnkle',        // 27\n  'rightAnkle',       // 28\n  'leftHeel',         // 29\n  'rightHeel',        // 30\n  'leftFoot',         // 31\n  'rightFoot',        // 32\n  'bodyCenter',       // 33\n  'bodyTop',          // 34\n  'leftThumb',        // 35\n  'leftHand',         // 36\n  'rightThumb',       // 37\n  'rightHand',        // 38\n];\n\nexport const connected: Record<string, string[]> = {\n  leftLeg: ['leftHip', 'leftKnee', 'leftAnkle', 'leftHeel', 'leftFoot'],\n  rightLeg: ['rightHip', 'rightKnee', 'rightAnkle', 'rightHeel', 'rightFoot'],\n  torso: ['leftShoulder', 'rightShoulder', 'rightHip', 'leftHip', 'leftShoulder'],\n  leftArm: ['leftShoulder', 'leftElbow', 'leftWrist', 'leftPalm'],\n  rightArm: ['rightShoulder', 'rightElbow', 'rightWrist', 'rightPalm'],\n  leftHand: [],\n  rightHand: [],\n  head: [],\n};\n", "/**\n * BlazePose model implementation\n */\n\nimport * as tf from '../../dist/tfjs.esm.js';\nimport { log, join, now } from '../util/util';\nimport type { BodyKeypoint, BodyResult, Box, Point } from '../result';\nimport type { GraphModel, Tensor } from '../tfjs/types';\nimport type { Config } from '../config';\nimport * as coords from './blazeposecoords';\n\nconst env = { initial: true };\nconst models: [GraphModel | null, GraphModel | null] = [null, null];\nconst inputSize = [[0, 0], [0, 0]];\nlet skipped = Number.MAX_SAFE_INTEGER;\nlet outputNodes: string[]; // different for lite/full/heavy\nlet cache: BodyResult | null = null;\nlet padding: [number, number][] = [[0, 0], [0, 0], [0, 0], [0, 0]];\nlet lastTime = 0;\n\n
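// two models: models[0] is an optional person detector, loaded only when config.body.detector.modelPath\n// is set; models[1] is the pose model proper, regressing 39 keypoints of [x, y, z, visibility, presence]\n// each; expected input resolutions for both are read from the model signatures at load time\n\n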
export async function loadDetect(config: Config): Promise<GraphModel> {\n  if (env.initial) models[0] = null;\n  if (!models[0] && config.body.detector?.modelPath || '') {\n    models[0] = await tf.loadGraphModel(join(config.modelBasePath, config.body.detector?.modelPath || '')) as unknown as GraphModel;\n    const inputs = Object.values(models[0].modelSignature['inputs']);\n    inputSize[0][0] = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[1].size) : 0;\n    inputSize[0][1] = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : 0;\n    if (!models[0] || !models[0]['modelUrl']) log('load model failed:', config.body.detector?.modelPath);\n    else if (config.debug) log('load model:', models[0]['modelUrl']);\n  } else if (config.debug && models[0]) log('cached model:', models[0]['modelUrl']);\n  return models[0] as GraphModel;\n}\n\nexport async function loadPose(config: Config): Promise<GraphModel> {\n  if (env.initial) models[1] = null;\n  if (!models[1]) {\n    models[1] = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath || '')) as unknown as GraphModel;\n    const inputs = Object.values(models[1].modelSignature['inputs']);\n    inputSize[1][0] = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[1].size) : 0;\n    inputSize[1][1] = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : 0;\n    if (config.body.modelPath?.includes('lite')) outputNodes = ['ld_3d', 'output_segmentation', 'output_heatmap', 'world_3d', 'output_poseflag'];\n    else outputNodes = ['Identity', 'Identity_2', 'Identity_3', 'Identity_4', 'Identity_1']; // v2 from pinto full and heavy\n    if (!models[1] || !models[1]['modelUrl']) log('load model failed:', config.body.modelPath);\n    else if (config.debug) log('load model:', models[1]['modelUrl']);\n  } else if (config.debug) log('cached model:', models[1]['modelUrl']);\n  return models[1];\n}\n\nexport async function load(config: Config): Promise<[GraphModel | null, GraphModel | null]> {\n  if (!models[0]) await loadDetect(config);\n  if (!models[1]) await loadPose(config);\n  return models;\n}\n\nfunction calculateBoxes(keypoints: Array<BodyKeypoint>, outputSize: [number, number]): { keypointsBox: Box, keypointsBoxRaw: Box } {\n  const x = keypoints.map((a) => a.position[0]);\n  const y = keypoints.map((a) => a.position[1]);\n  const keypointsBox: Box = [Math.min(...x), Math.min(...y), Math.max(...x) - Math.min(...x), Math.max(...y) - Math.min(...y)];\n  const keypointsBoxRaw: Box = [keypointsBox[0] / outputSize[0], keypointsBox[1] / outputSize[1], keypointsBox[2] / outputSize[0], keypointsBox[3] / outputSize[1]];\n  return { keypointsBox, keypointsBoxRaw };\n}\n\nasync function prepareImage(input: Tensor): Promise<Tensor> {\n  const t: Record<string, Tensor> = {};\n  if (!input.shape || !input.shape[1] || !input.shape[2]) return input;\n  padding = [\n    [0, 0], // dont touch batch\n    [input.shape[2] > input.shape[1] ? Math.trunc((input.shape[2] - input.shape[1]) / 2) : 0, input.shape[2] > input.shape[1] ? Math.trunc((input.shape[2] - input.shape[1]) / 2) : 0], // height before&after\n    [input.shape[1] > input.shape[2] ? Math.trunc((input.shape[1] - input.shape[2]) / 2) : 0, input.shape[1] > input.shape[2] ? Math.trunc((input.shape[1] - input.shape[2]) / 2) : 0], // width before&after\n    [0, 0], // dont touch rgb\n  ];\n  t.pad = tf.pad(input, padding);\n  t.resize = tf.image.resizeBilinear(t.pad, [inputSize[1][0], inputSize[1][1]]);\n  const final = tf.div(t.resize, 255);\n  Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));\n  return final;\n}\n\n
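// prepareImage above pads the frame to square before resizing so aspect ratio is preserved; worked\n// example (hypothetical input): a 1x720x1280x3 frame gets (1280 - 720) / 2 = 280 rows of padding above\n// and below -> 1x1280x1280x3, then bilinear resize to the model resolution; rescaleKeypoints below\n// applies the inverse mapping so keypoints land back in source-image coordinates\n\n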
function rescaleKeypoints(keypoints: Array<BodyKeypoint>, outputSize: [number, number]): Array<BodyKeypoint> {\n  for (const kpt of keypoints) {\n    kpt.position = [\n      kpt.position[0] * (outputSize[0] + padding[2][0] + padding[2][1]) / outputSize[0] - padding[2][0],\n      kpt.position[1] * (outputSize[1] + padding[1][0] + padding[1][1]) / outputSize[1] - padding[1][0],\n      kpt.position[2] as number,\n    ];\n    kpt.positionRaw = [\n      kpt.position[0] / outputSize[0], kpt.position[1] / outputSize[1], kpt.position[2] as number,\n    ];\n  }\n  return keypoints;\n}\n\nconst sigmoid = (x) => (1 - (1 / (1 + Math.exp(x))));\n\nasync function detectParts(input: Tensor, config: Config, outputSize: [number, number]): Promise<BodyResult | null> {\n  const t: Record<string, Tensor> = {};\n  t.input = await prepareImage(input);\n  /**\n   * t.ld: 39 keypoints [x,y,z,score,presence] normalized to input size\n   * t.segmentation:\n   * t.heatmap:\n   * t.world: 39 keypoints [x,y,z] normalized to -1..1\n   * t.poseflag: body score\n   */\n  [t.ld/* 1,195(39*5) */, t.segmentation/* 1,256,256,1 */, t.heatmap/* 1,64,64,39 */, t.world/* 1,117(39*3) */, t.poseflag/* 1,1 */] = models[1]?.execute(t.input, outputNodes) as Tensor[]; // run model\n  const poseScoreRaw = (await t.poseflag.data())[0];\n  const poseScore = Math.max(0, (poseScoreRaw - 0.8) / (1 - 0.8)); // blow up score variance 5x\n  const points = await t.ld.data();\n  const keypointsRelative: Array<BodyKeypoint> = [];\n  const depth = 5; // each point has x,y,z,visibility,presence\n  for (let i = 0; i < points.length / depth; i++) {\n    const score = sigmoid(points[depth * i + 3]);\n    const presence = sigmoid(points[depth * i + 4]);\n    const adjScore = Math.trunc(100 * score * presence * poseScore) / 100;\n    const positionRaw: Point = [points[depth * i + 0] / inputSize[1][0], points[depth * i + 1] / inputSize[1][1], points[depth * i + 2] + 0];\n    const position: Point = [Math.trunc(outputSize[0] * positionRaw[0]), Math.trunc(outputSize[1] * positionRaw[1]), positionRaw[2] as number];\n    keypointsRelative.push({ part: coords.kpt[i], positionRaw, position, score: adjScore });\n  }\n  if (poseScore < (config.body.minConfidence || 0)) return null;\n  const keypoints: Array<BodyKeypoint> = rescaleKeypoints(keypointsRelative, outputSize); // keypoints were relative to input image which is cropped\n  const boxes = calculateBoxes(keypoints, [outputSize[0], outputSize[1]]); // now find boxes based on rescaled keypoints\n  Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));\n  const annotations: Record<string, Point[][]> = {};\n  for (const [name, indexes] of Object.entries(coords.connected)) {\n    const pt: Array<Point[]> = [];\n    for (let i = 0; i < indexes.length - 1; i++) {\n      const pt0 = keypoints.find((kpt) => kpt.part === indexes[i]);\n      const pt1 = keypoints.find((kpt) => kpt.part === indexes[i + 1]);\n      if (pt0 && pt1 && pt0.score > (config.body.minConfidence || 0) && pt1.score > (config.body.minConfidence || 0)) pt.push([pt0.position, pt1.position]);\n    }\n    annotations[name] = pt;\n  }\n  const body = { id: 0, score: Math.trunc(100 * poseScore) / 100, box: boxes.keypointsBox, boxRaw: boxes.keypointsBoxRaw, keypoints, annotations };\n  return body;\n}\n\nexport async function predict(input: Tensor, config: Config): Promise<BodyResult[]> {\n  const outputSize: [number, number] = [input.shape[2] || 
0, input.shape[1] || 0];\n const skipTime = (config.body.skipTime || 0) > (now() - lastTime);\n const skipFrame = skipped < (config.body.skipFrames || 0);\n if (config.skipAllowed && skipTime && skipFrame && cache !== null) {\n skipped++;\n } else {\n cache = await detectParts(input, config, outputSize);\n lastTime = now();\n skipped = 0;\n }\n if (cache) return [cache];\n return [];\n}\n", "/**\n * CoCo Labels used by object detection implementations\n */\nexport const labels = [\n { class: 1, label: 'person' },\n { class: 2, label: 'bicycle' },\n { class: 3, label: 'car' },\n { class: 4, label: 'motorcycle' },\n { class: 5, label: 'airplane' },\n { class: 6, label: 'bus' },\n { class: 7, label: 'train' },\n { class: 8, label: 'truck' },\n { class: 9, label: 'boat' },\n { class: 10, label: 'traffic light' },\n { class: 11, label: 'fire hydrant' },\n { class: 12, label: 'stop sign' },\n { class: 13, label: 'parking meter' },\n { class: 14, label: 'bench' },\n { class: 15, label: 'bird' },\n { class: 16, label: 'cat' },\n { class: 17, label: 'dog' },\n { class: 18, label: 'horse' },\n { class: 19, label: 'sheep' },\n { class: 20, label: 'cow' },\n { class: 21, label: 'elephant' },\n { class: 22, label: 'bear' },\n { class: 23, label: 'zebra' },\n { class: 24, label: 'giraffe' },\n { class: 25, label: 'backpack' },\n { class: 26, label: 'umbrella' },\n { class: 27, label: 'handbag' },\n { class: 28, label: 'tie' },\n { class: 29, label: 'suitcase' },\n { class: 30, label: 'frisbee' },\n { class: 31, label: 'skis' },\n { class: 32, label: 'snowboard' },\n { class: 33, label: 'sports ball' },\n { class: 34, label: 'kite' },\n { class: 35, label: 'baseball bat' },\n { class: 36, label: 'baseball glove' },\n { class: 37, label: 'skateboard' },\n { class: 38, label: 'surfboard' },\n { class: 39, label: 'tennis racket' },\n { class: 40, label: 'bottle' },\n { class: 41, label: 'wine glass' },\n { class: 42, label: 'cup' },\n { class: 43, label: 'fork' },\n { class: 44, label: 'knife' },\n { class: 45, label: 'spoon' },\n { class: 46, label: 'bowl' },\n { class: 47, label: 'banana' },\n { class: 48, label: 'apple' },\n { class: 49, label: 'sandwich' },\n { class: 50, label: 'orange' },\n { class: 51, label: 'broccoli' },\n { class: 52, label: 'carrot' },\n { class: 53, label: 'hot dog' },\n { class: 54, label: 'pizza' },\n { class: 55, label: 'donut' },\n { class: 56, label: 'cake' },\n { class: 57, label: 'chair' },\n { class: 58, label: 'couch' },\n { class: 59, label: 'potted plant' },\n { class: 60, label: 'bed' },\n { class: 61, label: 'dining table' },\n { class: 62, label: 'toilet' },\n { class: 63, label: 'tv' },\n { class: 64, label: 'laptop' },\n { class: 65, label: 'mouse' },\n { class: 66, label: 'remote' },\n { class: 67, label: 'keyboard' },\n { class: 68, label: 'cell phone' },\n { class: 69, label: 'microwave' },\n { class: 70, label: 'oven' },\n { class: 71, label: 'toaster' },\n { class: 72, label: 'sink' },\n { class: 73, label: 'refrigerator' },\n { class: 74, label: 'book' },\n { class: 75, label: 'clock' },\n { class: 76, label: 'vase' },\n { class: 77, label: 'scissors' },\n { class: 78, label: 'teddy bear' },\n { class: 79, label: 'hair drier' },\n { class: 80, label: 'toothbrush' },\n];\n", "/**\n * CenterNet object detection model implementation\n *\n * Based on: [**NanoDet**](https://github.com/RangiLyu/nanodet)\n */\n\nimport { log, join, now } from '../util/util';\nimport * as tf from '../../dist/tfjs.esm.js';\nimport { labels } from './labels';\nimport type { 
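// Usage note (illustrative): the labels table above is indexed by the 0-based
// position the detector emits (labels[classVal].label in process() further
// below), while the class field preserves the original 1-based CoCo id, so a
// hypothetical guarded lookup such as labels[classVal]?.label ?? 'unknown'
// protects against out-of-range class values.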
ObjectResult, Box } from '../result';\nimport type { GraphModel, Tensor } from '../tfjs/types';\nimport type { Config } from '../config';\nimport { env } from '../util/env';\n\nlet model: GraphModel | null;\nlet inputSize = 0;\nlet last: ObjectResult[] = [];\nlet lastTime = 0;\nlet skipped = Number.MAX_SAFE_INTEGER;\n\nexport async function load(config: Config): Promise {\n if (env.initial) model = null;\n if (!model) {\n // fakeOps(['floormod'], config);\n model = await tf.loadGraphModel(join(config.modelBasePath, config.object.modelPath || '')) as unknown as GraphModel;\n const inputs = Object.values(model.modelSignature['inputs']);\n inputSize = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : 0;\n if (!model || !model['modelUrl']) log('load model failed:', config.object.modelPath);\n else if (config.debug) log('load model:', model['modelUrl']);\n } else if (config.debug) log('cached model:', model['modelUrl']);\n return model;\n}\n\nasync function process(res: Tensor | null, outputShape, config: Config) {\n if (!res) return [];\n const results: Array = [];\n const detections = await res.array();\n const squeezeT = tf.squeeze(res);\n tf.dispose(res);\n const arr = tf.split(squeezeT, 6, 1); // x1, y1, x2, y2, score, class\n tf.dispose(squeezeT);\n const stackT = tf.stack([arr[1], arr[0], arr[3], arr[2]], 1); // reorder dims as tf.nms expects y, x\n const boxesT = tf.squeeze(stackT);\n tf.dispose(stackT);\n const scoresT = tf.squeeze(arr[4]);\n const classesT = tf.squeeze(arr[5]);\n arr.forEach((t) => tf.dispose(t));\n const nmsT = await tf.image.nonMaxSuppressionAsync(boxesT, scoresT, config.object.maxDetected, config.object.iouThreshold, config.object.minConfidence);\n tf.dispose(boxesT);\n tf.dispose(scoresT);\n tf.dispose(classesT);\n const nms = await nmsT.data();\n tf.dispose(nmsT);\n let i = 0;\n for (const id of nms) {\n const score = Math.trunc(100 * detections[0][id][4]) / 100;\n const classVal = detections[0][id][5];\n const label = labels[classVal].label;\n const [x, y] = [\n detections[0][id][0] / inputSize,\n detections[0][id][1] / inputSize,\n ];\n const boxRaw: Box = [\n x,\n y,\n detections[0][id][2] / inputSize - x,\n detections[0][id][3] / inputSize - y,\n ];\n const box: Box = [\n Math.trunc(boxRaw[0] * outputShape[0]),\n Math.trunc(boxRaw[1] * outputShape[1]),\n Math.trunc(boxRaw[2] * outputShape[0]),\n Math.trunc(boxRaw[3] * outputShape[1]),\n ];\n results.push({ id: i++, score, class: classVal, label, box, boxRaw });\n }\n return results;\n}\n\nexport async function predict(input: Tensor, config: Config): Promise {\n const skipTime = (config.object.skipTime || 0) > (now() - lastTime);\n const skipFrame = skipped < (config.object.skipFrames || 0);\n if (config.skipAllowed && skipTime && skipFrame && (last.length > 0)) {\n skipped++;\n return last;\n }\n skipped = 0;\n return new Promise(async (resolve) => {\n const outputSize = [input.shape[2], input.shape[1]];\n const resize = tf.image.resizeBilinear(input, [inputSize, inputSize]);\n const objectT = config.object.enabled ? 
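// Box-order sketch for process() above (illustrative): the model emits
// [x1, y1, x2, y2, score, class] per detection, but
// tf.image.nonMaxSuppressionAsync expects boxes as [y1, x1, y2, x2], hence
// the stack order arr[1], arr[0], arr[3], arr[2]. Hypothetical helper:
//   const xyxyToYxyx = (b: [number, number, number, number]) => [b[1], b[0], b[3], b[2]];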
model?.execute(resize, ['tower_0/detections']) as Tensor : null;\n lastTime = now();\n tf.dispose(resize);\n\n const obj = await process(objectT, outputSize, config);\n last = obj;\n\n resolve(obj);\n });\n}\n", "export const kpt: Array<string> = [\n 'head',\n 'neck',\n 'rightShoulder',\n 'rightElbow',\n 'rightWrist',\n 'chest',\n 'leftShoulder',\n 'leftElbow',\n 'leftWrist',\n 'bodyCenter',\n 'rightHip',\n 'rightKnee',\n 'rightAnkle',\n 'leftHip',\n 'leftKnee',\n 'leftAnkle',\n];\n\nexport const connected: Record<string, Array<string>> = {\n leftLeg: ['leftHip', 'leftKnee', 'leftAnkle'],\n rightLeg: ['rightHip', 'rightKnee', 'rightAnkle'],\n torso: ['leftShoulder', 'rightShoulder', 'rightHip', 'leftHip', 'leftShoulder'],\n leftArm: ['leftShoulder', 'leftElbow', 'leftWrist'],\n rightArm: ['rightShoulder', 'rightElbow', 'rightWrist'],\n head: [],\n};\n", "/**\n * EfficientPose model implementation\n *\n * Based on: [**EfficientPose**](https://github.com/daniegr/EfficientPose)\n */\n\nimport { log, join, now } from '../util/util';\nimport * as tf from '../../dist/tfjs.esm.js';\nimport * as coords from './efficientposecoords';\nimport type { BodyResult, Point } from '../result';\nimport type { GraphModel, Tensor } from '../tfjs/types';\nimport type { Config } from '../config';\nimport { env } from '../util/env';\n\nlet model: GraphModel | null;\nlet lastTime = 0;\nconst cache: BodyResult = { id: 0, keypoints: [], box: [0, 0, 0, 0], boxRaw: [0, 0, 0, 0], score: 0, annotations: {} };\n\n// const keypoints: Array = [];\n// let box: Box = [0, 0, 0, 0];\n// let boxRaw: Box = [0, 0, 0, 0];\n// let score = 0;\nlet skipped = Number.MAX_SAFE_INTEGER;\n\nexport async function load(config: Config): Promise<GraphModel> {\n if (env.initial) model = null;\n if (!model) {\n model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath || '')) as unknown as GraphModel;\n if (!model || !model['modelUrl']) log('load model failed:', config.body.modelPath);\n else if (config.debug) log('load model:', model['modelUrl']);\n } else if (config.debug) log('cached model:', model['modelUrl']);\n return model;\n}\n\n// performs argmax and max functions on a 2d tensor\nfunction max2d(inputs, minScore) {\n const [width, height] = inputs.shape;\n return tf.tidy(() => {\n const reshaped = tf.reshape(inputs, [height * width]); // combine all data\n const newScore = tf.max(reshaped, 0).dataSync()[0]; // get highest score // inside tf.tidy\n if (newScore > minScore) { // skip coordinate calculation if score is too low\n const coordinates = tf.argMax(reshaped, 0);\n const x = tf.mod(coordinates, width).dataSync()[0]; // inside tf.tidy\n const y = tf.div(coordinates, tf.scalar(width, 'int32')).dataSync()[0]; // inside tf.tidy\n return [x, y, newScore];\n }\n return [0, 0, newScore];\n });\n}\n\nexport async function predict(image: Tensor, config: Config): Promise<BodyResult[]> {\n const skipTime = (config.body.skipTime || 0) > (now() - lastTime);\n const skipFrame = skipped < (config.body.skipFrames || 0);\n if (config.skipAllowed && skipTime && skipFrame && Object.keys(cache.keypoints).length > 0) {\n skipped++;\n return [cache];\n }\n skipped = 0;\n return new Promise(async (resolve) => {\n const tensor = tf.tidy(() => {\n if (!model?.inputs[0].shape) return null;\n const resize = tf.image.resizeBilinear(image, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);\n const enhance = tf.mul(resize, 2);\n const norm = enhance.sub(1);\n return norm;\n });\n\n let resT;\n if (config.body.enabled) resT = model?.execute(tensor);\n lastTime = now();\n 
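// Plain-JS equivalent of max2d() above, shown for clarity (hypothetical
// helper, not used by the module): argmax over the flattened heatmap, then
// the 2-D coordinate is recovered as (index mod width, floor(index / width)).
const argmax2d = (data: Float32Array, width: number): [number, number, number] => {
  let best = 0;
  for (let i = 1; i < data.length; i++) if (data[i] > data[best]) best = i;
  return [best % width, Math.floor(best / width), data[best]];
};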
tf.dispose(tensor);\n\n if (resT) {\n cache.keypoints.length = 0;\n const squeeze = resT.squeeze();\n tf.dispose(resT);\n // body parts are basically just a stack of 2d tensors\n const stack = squeeze.unstack(2);\n tf.dispose(squeeze);\n // process each unstacked tensor as a separate body part\n for (let id = 0; id < stack.length; id++) {\n // actual processing to get coordinates and score\n const [x, y, partScore] = max2d(stack[id], config.body.minConfidence);\n if (partScore > (config.body?.minConfidence || 0)) {\n cache.keypoints.push({\n score: Math.round(100 * partScore) / 100,\n part: coords.kpt[id],\n positionRaw: [ // normalized to 0..1\n // @ts-ignore model is not undefined here\n x / model.inputs[0].shape[2], y / model.inputs[0].shape[1],\n ],\n position: [ // normalized to input image size\n // @ts-ignore model is not undefined here\n Math.round(image.shape[2] * x / model.inputs[0].shape[2]), Math.round(image.shape[1] * y / model.inputs[0].shape[1]),\n ],\n });\n }\n }\n stack.forEach((s) => tf.dispose(s));\n }\n cache.score = cache.keypoints.reduce((prev, curr) => (curr.score > prev ? curr.score : prev), 0);\n const x = cache.keypoints.map((a) => a.position[0]);\n const y = cache.keypoints.map((a) => a.position[1]);\n cache.box = [\n Math.min(...x),\n Math.min(...y),\n Math.max(...x) - Math.min(...x),\n Math.max(...y) - Math.min(...y),\n ];\n const xRaw = cache.keypoints.map((a) => a.positionRaw[0]);\n const yRaw = cache.keypoints.map((a) => a.positionRaw[1]);\n cache.boxRaw = [\n Math.min(...xRaw),\n Math.min(...yRaw),\n Math.max(...xRaw) - Math.min(...xRaw),\n Math.max(...yRaw) - Math.min(...yRaw),\n ];\n for (const [name, indexes] of Object.entries(coords.connected)) {\n const pt: Array<Point[]> = [];\n for (let i = 0; i < indexes.length - 1; i++) {\n const pt0 = cache.keypoints.find((kpt) => kpt.part === indexes[i]);\n const pt1 = cache.keypoints.find((kpt) => kpt.part === indexes[i + 1]);\n if (pt0 && pt1 && pt0.score > (config.body.minConfidence || 0) && pt1.score > (config.body.minConfidence || 0)) pt.push([pt0.position, pt1.position]);\n }\n cache.annotations[name] = pt;\n }\n resolve([cache]);\n });\n}\n", "/**\n * Emotion model implementation\n *\n * [**Oarriaga**](https://github.com/oarriaga/face_classification)\n */\n\nimport { log, join, now } from '../util/util';\nimport type { Config } from '../config';\nimport type { GraphModel, Tensor } from '../tfjs/types';\nimport * as tf from '../../dist/tfjs.esm.js';\nimport { env } from '../util/env';\n\nconst annotations = ['angry', 'disgust', 'fear', 'happy', 'sad', 'surprise', 'neutral'];\nlet model: GraphModel | null;\n// let last: Array<{ score: number, emotion: string }> = [];\nconst last: Array<Array<{ score: number, emotion: string }>> = [];\nlet lastCount = 0;\nlet lastTime = 0;\nlet skipped = Number.MAX_SAFE_INTEGER;\n\n// tuning values\nconst rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale\n\nexport async function load(config: Config): Promise<GraphModel> {\n if (env.initial) model = null;\n if (!model) {\n model = await tf.loadGraphModel(join(config.modelBasePath, config.face.emotion?.modelPath || '')) as unknown as GraphModel;\n if (!model || !model['modelUrl']) log('load model failed:', config.face.emotion?.modelPath);\n else if (config.debug) log('load model:', model['modelUrl']);\n } else if (config.debug) log('cached model:', model['modelUrl']);\n return model;\n}\n\nexport async function predict(image: Tensor, config: Config, idx, count) {\n if (!model) return null;\n const skipFrame = skipped < 
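// The same temporal-cache gate recurs across modules: cached results are
// reused only while both the time budget (skipTime, in ms) and the frame
// budget (skipFrames) hold. Minimal standalone sketch (hypothetical helper;
// the modules use their own now() rather than Date.now()):
//   const canSkip = (cfg: { skipTime?: number, skipFrames?: number }, lastRun: number, alreadySkipped: number) =>
//     ((cfg.skipTime || 0) > (Date.now() - lastRun)) && (alreadySkipped < (cfg.skipFrames || 0));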
(config.face.emotion?.skipFrames || 0);\n const skipTime = (config.face.emotion?.skipTime || 0) > (now() - lastTime);\n if (config.skipAllowed && skipTime && skipFrame && (lastCount === count) && last[idx] && (last[idx].length > 0)) {\n skipped++;\n return last[idx];\n }\n skipped = 0;\n return new Promise(async (resolve) => {\n const obj: Array<{ score: number, emotion: string }> = [];\n if (config.face.emotion?.enabled) {\n const t: Record = {};\n const inputSize = model?.inputs[0].shape ? model.inputs[0].shape[2] : 0;\n t.resize = tf.image.resizeBilinear(image, [inputSize, inputSize], false);\n // const box = [[0.15, 0.15, 0.85, 0.85]]; // empyrical values for top, left, bottom, right\n // const resize = tf.image.cropAndResize(image, box, [0], [inputSize, inputSize]);\n [t.red, t.green, t.blue] = tf.split(t.resize, 3, 3);\n // weighted rgb to grayscale: https://www.mathworks.com/help/matlab/ref/rgb2gray.html\n t.redNorm = tf.mul(t.red, rgb[0]);\n t.greenNorm = tf.mul(t.green, rgb[1]);\n t.blueNorm = tf.mul(t.blue, rgb[2]);\n t.grayscale = tf.addN([t.redNorm, t.greenNorm, t.blueNorm]);\n t.grayscaleSub = tf.sub(t.grayscale, 0.5);\n t.grayscaleMul = tf.mul(t.grayscaleSub, 2);\n t.emotion = model?.execute(t.grayscaleMul) as Tensor; // result is already in range 0..1, no need for additional activation\n lastTime = now();\n const data = await t.emotion.data();\n for (let i = 0; i < data.length; i++) {\n if (data[i] > (config.face.emotion?.minConfidence || 0)) obj.push({ score: Math.min(0.99, Math.trunc(100 * data[i]) / 100), emotion: annotations[i] });\n }\n obj.sort((a, b) => b.score - a.score);\n Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));\n }\n last[idx] = obj;\n lastCount = count;\n resolve(obj);\n });\n}\n", "import * as coords from './facemeshcoords';\nimport * as util from './facemeshutil';\nimport * as tf from '../../dist/tfjs.esm.js';\nimport type { Tensor, GraphModel } from '../tfjs/types';\nimport { env } from '../util/env';\nimport { log, join } from '../util/util';\nimport type { Config } from '../config';\nimport type { Point } from '../result';\n\nlet model: GraphModel | null;\nlet inputSize = 0;\n\nconst irisEnlarge = 2.3;\n\nconst leftOutline = coords.meshAnnotations['leftEyeLower0'];\nconst rightOutline = coords.meshAnnotations['rightEyeLower0'];\n\nconst eyeLandmarks = {\n leftBounds: [leftOutline[0], leftOutline[leftOutline.length - 1]],\n rightBounds: [rightOutline[0], rightOutline[rightOutline.length - 1]],\n};\n\nconst irisLandmarks = {\n upperCenter: 3,\n lowerCenter: 4,\n index: 71,\n numCoordinates: 76,\n};\n\nexport async function load(config: Config): Promise {\n if (env.initial) model = null;\n if (!model) {\n model = await tf.loadGraphModel(join(config.modelBasePath, config.face.iris?.modelPath || '')) as unknown as GraphModel;\n if (!model || !model['modelUrl']) log('load model failed:', config.face.iris?.modelPath);\n else if (config.debug) log('load model:', model['modelUrl']);\n } else if (config.debug) log('cached model:', model['modelUrl']);\n inputSize = model.inputs[0].shape ? 
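// The rgb factors above are the ITU-R BT.601 luma weights (the same values
// MATLAB's rgb2gray documents), so per pixel the grayscale conversion is:
//   const luma = (r: number, g: number, b: number) => 0.2989 * r + 0.5870 * g + 0.1140 * b; // illustrative scalar form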
model.inputs[0].shape[2] : 0;\n if (inputSize === -1) inputSize = 64;\n return model;\n}\n\n// Replace the raw coordinates returned by facemesh with refined iris model coordinates\n// Update the z coordinate to be an average of the original and the new.\nfunction replaceRawCoordinates(rawCoords, newCoords, prefix, keys) {\n for (let i = 0; i < coords.MESH_TO_IRIS_INDICES_MAP.length; i++) {\n const { key, indices } = coords.MESH_TO_IRIS_INDICES_MAP[i];\n const originalIndices = coords.meshAnnotations[`${prefix}${key}`];\n if (!keys || keys.includes(key)) {\n for (let j = 0; j < indices.length; j++) {\n const index = indices[j];\n rawCoords[originalIndices[j]] = [\n newCoords[index][0], newCoords[index][1],\n (newCoords[index][2] + rawCoords[originalIndices[j]][2]) / 2,\n ];\n }\n }\n }\n}\n\n// eslint-disable-next-line class-methods-use-this\nexport const getLeftToRightEyeDepthDifference = (rawCoords) => {\n const leftEyeZ = rawCoords[eyeLandmarks.leftBounds[0]][2];\n const rightEyeZ = rawCoords[eyeLandmarks.rightBounds[0]][2];\n return leftEyeZ - rightEyeZ;\n};\n\n// Returns a box describing a cropped region around the eye fit for passing to the iris model.\nexport const getEyeBox = (rawCoords, face, eyeInnerCornerIndex, eyeOuterCornerIndex, flip = false, meshSize) => {\n const box = util.squarifyBox(util.enlargeBox(util.calculateLandmarksBoundingBox([rawCoords[eyeInnerCornerIndex], rawCoords[eyeOuterCornerIndex]]), irisEnlarge));\n const boxSize = util.getBoxSize(box);\n let crop = tf.image.cropAndResize(face, [[\n box.startPoint[1] / meshSize,\n box.startPoint[0] / meshSize, box.endPoint[1] / meshSize,\n box.endPoint[0] / meshSize,\n ]], [0], [inputSize, inputSize]);\n if (flip && env.kernels.includes('flipleftright')) {\n const flipped = tf.image.flipLeftRight(crop); // flipLeftRight is not defined for tfjs-node\n tf.dispose(crop);\n crop = flipped;\n }\n return { box, boxSize, crop };\n};\n\n// Given a cropped image of an eye, returns the coordinates of the contours surrounding the eye and the iris.\nexport const getEyeCoords = (eyeData, eyeBox, eyeBoxSize, flip = false) => {\n const eyeRawCoords: Array = [];\n for (let i = 0; i < irisLandmarks.numCoordinates; i++) {\n const x = eyeData[i * 3];\n const y = eyeData[i * 3 + 1];\n const z = eyeData[i * 3 + 2];\n eyeRawCoords.push([\n (flip ? 
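// As in the object-detection NMS path, tf.image.cropAndResize in getEyeBox()
// above takes boxes normalized to 0..1 in [y1, x1, y2, x2] order, which is
// why start/end points are divided by meshSize with y first. Illustrative:
//   const normBox = (s: [number, number], e: [number, number], size: number) =>
//     [s[1] / size, s[0] / size, e[1] / size, e[0] / size]; // [y1, x1, y2, x2]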
(1 - (x / inputSize)) : (x / inputSize)) * eyeBoxSize[0] + eyeBox.startPoint[0],\n (y / inputSize) * eyeBoxSize[1] + eyeBox.startPoint[1], z,\n ]);\n }\n return { rawCoords: eyeRawCoords, iris: eyeRawCoords.slice(irisLandmarks.index) };\n};\n\n// The z-coordinates returned for the iris are unreliable, so we take the z values from the surrounding keypoints.\n// eslint-disable-next-line class-methods-use-this\nexport const getAdjustedIrisCoords = (rawCoords, irisCoords, direction) => {\n const upperCenterZ = rawCoords[coords.meshAnnotations[`${direction}EyeUpper0`][irisLandmarks.upperCenter]][2];\n const lowerCenterZ = rawCoords[coords.meshAnnotations[`${direction}EyeLower0`][irisLandmarks.lowerCenter]][2];\n const averageZ = (upperCenterZ + lowerCenterZ) / 2;\n // Iris indices: 0: center | 1: right | 2: above | 3: left | 4: below\n return irisCoords.map((coord, i) => {\n let z = averageZ;\n if (i === 2) {\n z = upperCenterZ;\n } else if (i === 4) {\n z = lowerCenterZ;\n }\n return [coord[0], coord[1], z];\n });\n};\n\nexport async function augmentIris(rawCoords, face, config, meshSize) {\n if (!model) {\n if (config.debug) log('face mesh iris detection requested, but model is not loaded');\n return rawCoords;\n }\n const { box: leftEyeBox, boxSize: leftEyeBoxSize, crop: leftEyeCrop } = getEyeBox(rawCoords, face, eyeLandmarks.leftBounds[0], eyeLandmarks.leftBounds[1], true, meshSize);\n const { box: rightEyeBox, boxSize: rightEyeBoxSize, crop: rightEyeCrop } = getEyeBox(rawCoords, face, eyeLandmarks.rightBounds[0], eyeLandmarks.rightBounds[1], true, meshSize);\n const combined = tf.concat([leftEyeCrop, rightEyeCrop]);\n tf.dispose(leftEyeCrop);\n tf.dispose(rightEyeCrop);\n const eyePredictions = model.execute(combined) as Tensor;\n tf.dispose(combined);\n const eyePredictionsData = await eyePredictions.data();\n tf.dispose(eyePredictions);\n const leftEyeData = eyePredictionsData.slice(0, irisLandmarks.numCoordinates * 3);\n const { rawCoords: leftEyeRawCoords, iris: leftIrisRawCoords } = getEyeCoords(leftEyeData, leftEyeBox, leftEyeBoxSize, true);\n const rightEyeData = eyePredictionsData.slice(irisLandmarks.numCoordinates * 3);\n const { rawCoords: rightEyeRawCoords, iris: rightIrisRawCoords } = getEyeCoords(rightEyeData, rightEyeBox, rightEyeBoxSize);\n const leftToRightEyeDepthDifference = getLeftToRightEyeDepthDifference(rawCoords);\n if (Math.abs(leftToRightEyeDepthDifference) < 30) { // User is looking straight ahead.\n replaceRawCoordinates(rawCoords, leftEyeRawCoords, 'left', null);\n replaceRawCoordinates(rawCoords, rightEyeRawCoords, 'right', null);\n // If the user is looking to the left or to the right, the iris coordinates tend to diverge too much from the mesh coordinates for them to be merged\n // So we only update a single contour line above and below the eye.\n } else if (leftToRightEyeDepthDifference < 1) { // User is looking towards the right.\n replaceRawCoordinates(rawCoords, leftEyeRawCoords, 'left', ['EyeUpper0', 'EyeLower0']);\n } else { // User is looking towards the left.\n replaceRawCoordinates(rawCoords, rightEyeRawCoords, 'right', ['EyeUpper0', 'EyeLower0']);\n }\n const adjustedLeftIrisCoords = getAdjustedIrisCoords(rawCoords, leftIrisRawCoords, 'left');\n const adjustedRightIrisCoords = getAdjustedIrisCoords(rawCoords, rightIrisRawCoords, 'right');\n const newCoords = rawCoords.concat(adjustedLeftIrisCoords).concat(adjustedRightIrisCoords);\n return newCoords;\n}\n", "/**\n * BlazeFace, FaceMesh & Iris model implementation\n *\n * Based on:\n * - [**MediaPipe 
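// Summary of the merge heuristic in augmentIris() above, mirroring its
// thresholds (illustrative only): a small z-gap between the eyes means a
// roughly frontal face, so both refined eye contours are merged; otherwise
// only one eye's upper/lower contour lines are trusted.
//   const gazeBucket = (dz: number) => (Math.abs(dz) < 30 ? 'frontal' : (dz < 1 ? 'right' : 'left'));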
BlazeFace**](https://drive.google.com/file/d/1f39lSzU5Oq-j_OXgS67KfN5wNsoeAZ4V/view)\n * - Facial Spacial Geometry: [**MediaPipe FaceMesh**](https://drive.google.com/file/d/1VFC_wIpw4O7xBOiTgUldl79d9LA-LsnA/view)\n * - Eye Iris Details: [**MediaPipe Iris**](https://drive.google.com/file/d/1bsWbokp9AklH2ANjCfmjqEzzxO1CNbMu/view)\n */\n\nimport { log, join, now } from '../util/util';\nimport * as tf from '../../dist/tfjs.esm.js';\nimport * as blazeface from './blazeface';\nimport * as util from './facemeshutil';\nimport * as coords from './facemeshcoords';\nimport * as iris from './iris';\nimport type { GraphModel, Tensor } from '../tfjs/types';\nimport type { FaceResult, Point } from '../result';\nimport type { Config } from '../config';\nimport { env } from '../util/env';\n\ntype BoxCache = { startPoint: Point, endPoint: Point, landmarks: Array, confidence: number };\nlet boxCache: Array = [];\nlet model: GraphModel | null = null;\nlet inputSize = 0;\nlet skipped = Number.MAX_SAFE_INTEGER;\nlet lastTime = 0;\nconst enlargeFact = 1.6;\n\nexport async function predict(input: Tensor, config: Config): Promise {\n // reset cached boxes\n\n const skipTime = (config.face.detector?.skipTime || 0) > (now() - lastTime);\n const skipFrame = skipped < (config.face.detector?.skipFrames || 0);\n if (!config.skipAllowed || !skipTime || !skipFrame || boxCache.length === 0) {\n const possibleBoxes = await blazeface.getBoxes(input, config); // get results from blazeface detector\n lastTime = now();\n boxCache = []; // empty cache\n for (const possible of possibleBoxes.boxes) { // extract data from detector\n const box: BoxCache = {\n startPoint: possible.box.startPoint,\n endPoint: possible.box.endPoint,\n landmarks: possible.landmarks,\n confidence: possible.confidence,\n };\n boxCache.push(util.squarifyBox(util.enlargeBox(util.scaleBoxCoordinates(box, possibleBoxes.scaleFactor), Math.sqrt(enlargeFact))));\n }\n skipped = 0;\n } else {\n skipped++;\n }\n const faces: Array = [];\n const newCache: Array = [];\n let id = 0;\n for (let i = 0; i < boxCache.length; i++) {\n let box = boxCache[i];\n let angle = 0;\n let rotationMatrix;\n const face: FaceResult = { // init face result\n id: id++,\n mesh: [],\n meshRaw: [],\n box: [0, 0, 0, 0],\n boxRaw: [0, 0, 0, 0],\n score: 0,\n boxScore: 0,\n faceScore: 0,\n annotations: {},\n };\n\n if (config.face.detector?.rotation && config.face.mesh?.enabled && env.kernels.includes('rotatewithoffset')) {\n [angle, rotationMatrix, face.tensor] = util.correctFaceRotation(box, input, inputSize);\n } else {\n rotationMatrix = util.fixedRotationMatrix;\n face.tensor = util.cutBoxFromImageAndResize(box, input, config.face.mesh?.enabled ? 
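// Note on the box cache above: detector boxes are squarified and enlarged by
// Math.sqrt(enlargeFact) (~1.265 for enlargeFact = 1.6), while mesh-derived
// boxes further below use the full factor; scaling both linear dimensions by
// sqrt(f) grows the box area by exactly f.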
[inputSize, inputSize] : [blazeface.size(), blazeface.size()]);\n }\n face.boxScore = Math.round(100 * box.confidence) / 100;\n if (!config.face.mesh?.enabled) { // mesh not enabled, return resuts from detector only\n face.box = util.getClampedBox(box, input);\n face.boxRaw = util.getRawBox(box, input);\n face.boxScore = Math.round(100 * box.confidence || 0) / 100;\n face.score = face.boxScore;\n face.mesh = box.landmarks.map((pt) => [\n ((box.startPoint[0] + box.endPoint[0])) / 2 + ((box.endPoint[0] + box.startPoint[0]) * pt[0] / blazeface.size()),\n ((box.startPoint[1] + box.endPoint[1])) / 2 + ((box.endPoint[1] + box.startPoint[1]) * pt[1] / blazeface.size()),\n ]);\n face.meshRaw = face.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize]);\n for (const key of Object.keys(coords.blazeFaceLandmarks)) face.annotations[key] = [face.mesh[coords.blazeFaceLandmarks[key] as number]]; // add annotations\n } else if (!model) { // mesh enabled, but not loaded\n if (config.debug) log('face mesh detection requested, but model is not loaded');\n } else { // mesh enabled\n const [contours, confidence, contourCoords] = model.execute(face.tensor as Tensor) as Array; // first returned tensor represents facial contours which are already included in the coordinates.\n const faceConfidence = await confidence.data();\n face.faceScore = Math.round(100 * faceConfidence[0]) / 100;\n const coordsReshaped = tf.reshape(contourCoords, [-1, 3]);\n let rawCoords = await coordsReshaped.array();\n tf.dispose([contourCoords, coordsReshaped, confidence, contours]);\n if (face.faceScore < (config.face.detector?.minConfidence || 1)) { // low confidence in detected mesh\n box.confidence = face.faceScore; // reset confidence of cached box\n } else {\n if (config.face.iris?.enabled) rawCoords = await iris.augmentIris(rawCoords, face.tensor, config, inputSize); // augment results with iris\n face.mesh = util.transformRawCoords(rawCoords, box, angle, rotationMatrix, inputSize); // get processed mesh\n face.meshRaw = face.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize]);\n for (const key of Object.keys(coords.meshAnnotations)) face.annotations[key] = coords.meshAnnotations[key].map((index) => face.mesh[index]); // add annotations\n box = util.squarifyBox(util.enlargeBox(util.calculateLandmarksBoundingBox(face.mesh), enlargeFact)); // redefine box with mesh calculated one\n face.box = util.getClampedBox(box, input); // update detected box with box around the face mesh\n face.boxRaw = util.getRawBox(box, input);\n face.score = face.faceScore;\n newCache.push(box);\n\n // other modules prefer different crop for a face so we dispose it and do it again\n /*\n tf.dispose(face.tensor);\n face.tensor = config.face.detector?.rotation && config.face.mesh?.enabled && env.kernels.includes('rotatewithoffset')\n ? 
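// The contour tensor in the mesh branch above is reshaped to [-1, 3], i.e.
// one [x, y, z] row per mesh vertex (468 vertices for FaceMesh, matching the
// TRI468/UV468 tables exported below), before optional iris augmentation.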
face.tensor = util.correctFaceRotation(util.enlargeBox(box, Math.sqrt(enlargeFact)), input, inputSize)[2]\n : face.tensor = util.cutBoxFromImageAndResize(util.enlargeBox(box, Math.sqrt(enlargeFact)), input, [inputSize, inputSize]);\n */\n }\n }\n faces.push(face);\n }\n boxCache = [...newCache]; // reset cache\n return faces;\n}\n\nexport async function load(config: Config): Promise {\n if (env.initial) model = null;\n if (!model) {\n model = await tf.loadGraphModel(join(config.modelBasePath, config.face.mesh?.modelPath || '')) as unknown as GraphModel;\n if (!model || !model['modelUrl']) log('load model failed:', config.face.mesh?.modelPath);\n else if (config.debug) log('load model:', model['modelUrl']);\n } else if (config.debug) log('cached model:', model['modelUrl']);\n inputSize = model.inputs[0].shape ? model.inputs[0].shape[2] : 0;\n if (inputSize === -1) inputSize = 64;\n return model;\n}\n\nexport const triangulation = coords.TRI468;\nexport const uvmap = coords.UV468;\n", "/**\n * FaceRes model implementation\n *\n * Returns Age, Gender, Descriptor\n * Implements Face simmilarity function\n *\n * Based on: [**HSE-FaceRes**](https://github.com/HSE-asavchenko/HSE_FaceRec_tf)\n */\n\nimport { log, join, now } from '../util/util';\nimport * as tf from '../../dist/tfjs.esm.js';\nimport type { Tensor, GraphModel } from '../tfjs/types';\nimport type { Config } from '../config';\nimport { env } from '../util/env';\n\nlet model: GraphModel | null;\nconst last: Array<{\n age: number,\n gender: string,\n genderScore: number,\n descriptor: number[],\n}> = [];\n\nlet lastTime = 0;\nlet lastCount = 0;\nlet skipped = Number.MAX_SAFE_INTEGER;\n\nexport async function load(config: Config): Promise {\n const modelUrl = join(config.modelBasePath, config.face.description?.modelPath || '');\n if (env.initial) model = null;\n if (!model) {\n model = await tf.loadGraphModel(modelUrl) as unknown as GraphModel;\n if (!model) log('load model failed:', config.face.description?.modelPath || '');\n else if (config.debug) log('load model:', modelUrl);\n } else if (config.debug) log('cached model:', modelUrl);\n return model;\n}\n\nexport function enhance(input): Tensor {\n const tensor = (input.image || input.tensor || input) as Tensor; // input received from detector is already normalized to 0..1, input is also assumed to be straightened\n if (!model?.inputs[0].shape) return tensor; // model has no shape so no point continuing\n // do a tight crop of image and resize it to fit the model\n const crop = tf.image.resizeBilinear(tensor, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);\n /*\n const box = [[0.05, 0.15, 0.85, 0.85]]; // empyrical values for top, left, bottom, right\n const crop = (tensor.shape.length === 3)\n ? 
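// Range note for enhance() here: the incoming face crop is already normalized
// to 0..1 by the detector, and the final tf.mul(crop, 255) below rescales it
// to 0..255, suggesting the FaceRes graph expects pixel-range input.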
tf.image.cropAndResize(tf.expandDims(tensor, 0), box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]) // add batch dimension if missing\n : tf.image.cropAndResize(tensor, box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]);\n */\n /*\n // just resize to fit the embedding model instead of cropping\n const crop = tf.image.resizeBilinear(tensor, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);\n */\n\n /*\n // convert to black&white to avoid colorization impact\n const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale: https://www.mathworks.com/help/matlab/ref/rgb2gray.html\n const [red, green, blue] = tf.split(crop, 3, 3);\n const redNorm = tf.mul(red, rgb[0]);\n const greenNorm = tf.mul(green, rgb[1]);\n const blueNorm = tf.mul(blue, rgb[2]);\n const grayscale = tf.addN([redNorm, greenNorm, blueNorm]);\n const merge = tf.stack([grayscale, grayscale, grayscale], 3).squeeze(4);\n */\n\n /*\n // increase image pseudo-contrast 100%\n // (or do it per-channel so mean is done on each channel)\n // (or calculate histogram and do it based on histogram)\n const mean = merge.mean();\n const factor = 2;\n const contrast = merge.sub(mean).mul(factor).add(mean);\n */\n\n /*\n // normalize brightness from 0..1\n // silly way of creating pseudo-hdr of image\n const darken = crop.sub(crop.min());\n const lighten = darken.div(darken.max());\n */\n const norm = tf.mul(crop, 255);\n tf.dispose(crop);\n return norm;\n}\n\nexport async function predict(image: Tensor, config: Config, idx, count) {\n if (!model) return null;\n const skipFrame = skipped < (config.face.description?.skipFrames || 0);\n const skipTime = (config.face.description?.skipTime || 0) > (now() - lastTime);\n if (config.skipAllowed && skipFrame && skipTime && (lastCount === count) && last[idx]?.age && (last[idx]?.age > 0)) {\n skipped++;\n return last[idx];\n }\n skipped = 0;\n return new Promise(async (resolve) => {\n const obj = {\n age: 0,\n gender: 'unknown',\n genderScore: 0,\n descriptor: [],\n };\n\n if (config.face.description?.enabled) {\n const enhanced = enhance(image);\n const resT = model?.execute(enhanced) as Tensor[];\n lastTime = now();\n tf.dispose(enhanced);\n const genderT = await resT.find((t) => t.shape[1] === 1) as Tensor;\n const gender = await genderT.data();\n const confidence = Math.trunc(200 * Math.abs((gender[0] - 0.5))) / 100;\n if (confidence > (config.face.description?.minConfidence || 0)) {\n obj.gender = gender[0] <= 0.5 ? 'female' : 'male';\n obj.genderScore = Math.min(0.99, confidence);\n }\n const argmax = tf.argMax(resT.find((t) => t.shape[1] === 100), 1);\n const age = (await argmax.data())[0];\n tf.dispose(argmax);\n const ageT = resT.find((t) => t.shape[1] === 100) as Tensor;\n const all = await ageT.data();\n obj.age = Math.round(all[age - 1] > all[age + 1] ? 10 * age - 100 * all[age - 1] : 10 * age + 100 * all[age + 1]) / 10;\n\n const desc = resT.find((t) => t.shape[1] === 1024);\n // const reshape = desc.reshape([128, 8]); // reshape large 1024-element descriptor to 128 x 8\n // const reduce = reshape.logSumExp(1); // reduce 2nd dimension by calculating logSumExp on it which leaves us with 128-element descriptor\n const descriptor = desc ? 
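// Decode sketch for the outputs above (illustrative mirror of the in-line
// expressions): gender confidence stretches |p - 0.5| onto 0..1, and the age
// estimate refines the argmax bucket by nudging toward the stronger
// neighboring bin with weight 10 * p(neighbor), rounded to 0.1 year.
//   const genderConfidence = (p: number) => Math.trunc(200 * Math.abs(p - 0.5)) / 100;
//   const refineAge = (bin: number, pPrev: number, pNext: number) =>
//     Math.round(pPrev > pNext ? 10 * bin - 100 * pPrev : 10 * bin + 100 * pNext) / 10;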
await desc.data() : [];\n obj.descriptor = Array.from(descriptor);\n resT.forEach((t) => tf.dispose(t));\n }\n last[idx] = obj;\n lastCount = count;\n resolve(obj);\n });\n}\n", "import * as tf from '../../dist/tfjs.esm.js';\nimport type { Point } from '../result';\n\nexport function getBoxSize(box) {\n return [\n Math.abs(box.endPoint[0] - box.startPoint[0]),\n Math.abs(box.endPoint[1] - box.startPoint[1]),\n ];\n}\n\nexport function getBoxCenter(box) {\n return [\n box.startPoint[0] + (box.endPoint[0] - box.startPoint[0]) / 2,\n box.startPoint[1] + (box.endPoint[1] - box.startPoint[1]) / 2,\n ];\n}\n\nexport function cutBoxFromImageAndResize(box, image, cropSize) {\n const h = image.shape[1];\n const w = image.shape[2];\n const boxes = [[\n box.startPoint[1] / h,\n box.startPoint[0] / w,\n box.endPoint[1] / h,\n box.endPoint[0] / w,\n ]];\n return tf.image.cropAndResize(image, boxes, [0], cropSize);\n}\n\nexport function scaleBoxCoordinates(box, factor) {\n const startPoint = [box.startPoint[0] * factor[0], box.startPoint[1] * factor[1]] as Point;\n const endPoint = [box.endPoint[0] * factor[0], box.endPoint[1] * factor[1]] as Point;\n const palmLandmarks = box.palmLandmarks.map((coord) => {\n const scaledCoord = [coord[0] * factor[0], coord[1] * factor[1]];\n return scaledCoord;\n });\n return { startPoint, endPoint, palmLandmarks, confidence: box.confidence };\n}\n\nexport function enlargeBox(box, factor = 1.5) {\n const center = getBoxCenter(box);\n const size = getBoxSize(box);\n const newHalfSize = [factor * size[0] / 2, factor * size[1] / 2];\n const startPoint = [center[0] - newHalfSize[0], center[1] - newHalfSize[1]] as Point;\n const endPoint = [center[0] + newHalfSize[0], center[1] + newHalfSize[1]] as Point;\n return { startPoint, endPoint, palmLandmarks: box.palmLandmarks };\n}\n\nexport function squarifyBox(box) {\n const centers = getBoxCenter(box);\n const size = getBoxSize(box);\n const maxEdge = Math.max(...size);\n const halfSize = maxEdge / 2;\n const startPoint = [centers[0] - halfSize, centers[1] - halfSize] as Point;\n const endPoint = [centers[0] + halfSize, centers[1] + halfSize] as Point;\n return { startPoint, endPoint, palmLandmarks: box.palmLandmarks };\n}\n\nexport function shiftBox(box, shiftFactor) {\n const boxSize = [\n box.endPoint[0] - box.startPoint[0],\n box.endPoint[1] - box.startPoint[1],\n ];\n const shiftVector = [boxSize[0] * shiftFactor[0], boxSize[1] * shiftFactor[1]];\n const startPoint = [box.startPoint[0] + shiftVector[0], box.startPoint[1] + shiftVector[1]] as Point;\n const endPoint = [box.endPoint[0] + shiftVector[0], box.endPoint[1] + shiftVector[1]] as Point;\n return { startPoint, endPoint, palmLandmarks: box.palmLandmarks };\n}\n\nexport function normalizeRadians(angle) {\n return angle - 2 * Math.PI * Math.floor((angle + Math.PI) / (2 * Math.PI));\n}\n\nexport function computeRotation(point1, point2) {\n const radians = Math.PI / 2 - Math.atan2(-(point2[1] - point1[1]), point2[0] - point1[0]);\n return normalizeRadians(radians);\n}\n\nexport const buildTranslationMatrix = (x, y) => [[1, 0, x], [0, 1, y], [0, 0, 1]];\n\nexport function dot(v1, v2) {\n let product = 0;\n for (let i = 0; i < v1.length; i++) {\n product += v1[i] * v2[i];\n }\n return product;\n}\n\nexport function getColumnFrom2DArr(arr, columnIndex) {\n const column: Array = [];\n for (let i = 0; i < arr.length; i++) {\n column.push(arr[i][columnIndex]);\n }\n return column;\n}\n\nexport function multiplyTransformMatrices(mat1, mat2) {\n const product: Array = [];\n 
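// The matrix helpers below compose rotation about an arbitrary center as
// T(center) * R(angle) * T(-center) in homogeneous 2-D coordinates: translate
// the center to the origin, rotate, then translate back, so a point p maps to
// p' = T(c) * R * T(-c) * p.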
const size = mat1.length;\n for (let row = 0; row < size; row++) {\n product.push([]);\n for (let col = 0; col < size; col++) {\n product[row].push(dot(mat1[row], getColumnFrom2DArr(mat2, col)));\n }\n }\n return product;\n}\n\nexport function buildRotationMatrix(rotation, center) {\n const cosA = Math.cos(rotation);\n const sinA = Math.sin(rotation);\n const rotationMatrix = [[cosA, -sinA, 0], [sinA, cosA, 0], [0, 0, 1]];\n const translationMatrix = buildTranslationMatrix(center[0], center[1]);\n const translationTimesRotation = multiplyTransformMatrices(translationMatrix, rotationMatrix);\n const negativeTranslationMatrix = buildTranslationMatrix(-center[0], -center[1]);\n return multiplyTransformMatrices(translationTimesRotation, negativeTranslationMatrix);\n}\n\nexport function invertTransformMatrix(matrix) {\n const rotationComponent = [[matrix[0][0], matrix[1][0]], [matrix[0][1], matrix[1][1]]];\n const translationComponent = [matrix[0][2], matrix[1][2]];\n const invertedTranslation = [\n -dot(rotationComponent[0], translationComponent),\n -dot(rotationComponent[1], translationComponent),\n ];\n return [\n rotationComponent[0].concat(invertedTranslation[0]),\n rotationComponent[1].concat(invertedTranslation[1]),\n [0, 0, 1],\n ];\n}\n\nexport function rotatePoint(homogeneousCoordinate, rotationMatrix) {\n return [\n dot(homogeneousCoordinate, rotationMatrix[0]),\n dot(homogeneousCoordinate, rotationMatrix[1]),\n ];\n}\n", "/**\n * HandPose model implementation constants\n * See `handpose.ts` for entry point\n */\n\nexport const anchors = [\n { x: 0.015625, y: 0.015625 },\n { x: 0.015625, y: 0.015625 },\n { x: 0.046875, y: 0.015625 },\n { x: 0.046875, y: 0.015625 },\n { x: 0.078125, y: 0.015625 },\n { x: 0.078125, y: 0.015625 },\n { x: 0.109375, y: 0.015625 },\n { x: 0.109375, y: 0.015625 },\n { x: 0.140625, y: 0.015625 },\n { x: 0.140625, y: 0.015625 },\n { x: 0.171875, y: 0.015625 },\n { x: 0.171875, y: 0.015625 },\n { x: 0.203125, y: 0.015625 },\n { x: 0.203125, y: 0.015625 },\n { x: 0.234375, y: 0.015625 },\n { x: 0.234375, y: 0.015625 },\n { x: 0.265625, y: 0.015625 },\n { x: 0.265625, y: 0.015625 },\n { x: 0.296875, y: 0.015625 },\n { x: 0.296875, y: 0.015625 },\n { x: 0.328125, y: 0.015625 },\n { x: 0.328125, y: 0.015625 },\n { x: 0.359375, y: 0.015625 },\n { x: 0.359375, y: 0.015625 },\n { x: 0.390625, y: 0.015625 },\n { x: 0.390625, y: 0.015625 },\n { x: 0.421875, y: 0.015625 },\n { x: 0.421875, y: 0.015625 },\n { x: 0.453125, y: 0.015625 },\n { x: 0.453125, y: 0.015625 },\n { x: 0.484375, y: 0.015625 },\n { x: 0.484375, y: 0.015625 },\n { x: 0.515625, y: 0.015625 },\n { x: 0.515625, y: 0.015625 },\n { x: 0.546875, y: 0.015625 },\n { x: 0.546875, y: 0.015625 },\n { x: 0.578125, y: 0.015625 },\n { x: 0.578125, y: 0.015625 },\n { x: 0.609375, y: 0.015625 },\n { x: 0.609375, y: 0.015625 },\n { x: 0.640625, y: 0.015625 },\n { x: 0.640625, y: 0.015625 },\n { x: 0.671875, y: 0.015625 },\n { x: 0.671875, y: 0.015625 },\n { x: 0.703125, y: 0.015625 },\n { x: 0.703125, y: 0.015625 },\n { x: 0.734375, y: 0.015625 },\n { x: 0.734375, y: 0.015625 },\n { x: 0.765625, y: 0.015625 },\n { x: 0.765625, y: 0.015625 },\n { x: 0.796875, y: 0.015625 },\n { x: 0.796875, y: 0.015625 },\n { x: 0.828125, y: 0.015625 },\n { x: 0.828125, y: 0.015625 },\n { x: 0.859375, y: 0.015625 },\n { x: 0.859375, y: 0.015625 },\n { x: 0.890625, y: 0.015625 },\n { x: 0.890625, y: 0.015625 },\n { x: 0.921875, y: 0.015625 },\n { x: 0.921875, y: 0.015625 },\n { x: 0.953125, y: 0.015625 },\n { x: 0.953125, y: 0.015625 
},\n { x: 0.984375, y: 0.015625 },\n { x: 0.984375, y: 0.015625 },\n { x: 0.015625, y: 0.046875 },\n { x: 0.015625, y: 0.046875 },\n { x: 0.046875, y: 0.046875 },\n { x: 0.046875, y: 0.046875 },\n { x: 0.078125, y: 0.046875 },\n { x: 0.078125, y: 0.046875 },\n { x: 0.109375, y: 0.046875 },\n { x: 0.109375, y: 0.046875 },\n { x: 0.140625, y: 0.046875 },\n { x: 0.140625, y: 0.046875 },\n { x: 0.171875, y: 0.046875 },\n { x: 0.171875, y: 0.046875 },\n { x: 0.203125, y: 0.046875 },\n { x: 0.203125, y: 0.046875 },\n { x: 0.234375, y: 0.046875 },\n { x: 0.234375, y: 0.046875 },\n { x: 0.265625, y: 0.046875 },\n { x: 0.265625, y: 0.046875 },\n { x: 0.296875, y: 0.046875 },\n { x: 0.296875, y: 0.046875 },\n { x: 0.328125, y: 0.046875 },\n { x: 0.328125, y: 0.046875 },\n { x: 0.359375, y: 0.046875 },\n { x: 0.359375, y: 0.046875 },\n { x: 0.390625, y: 0.046875 },\n { x: 0.390625, y: 0.046875 },\n { x: 0.421875, y: 0.046875 },\n { x: 0.421875, y: 0.046875 },\n { x: 0.453125, y: 0.046875 },\n { x: 0.453125, y: 0.046875 },\n { x: 0.484375, y: 0.046875 },\n { x: 0.484375, y: 0.046875 },\n { x: 0.515625, y: 0.046875 },\n { x: 0.515625, y: 0.046875 },\n { x: 0.546875, y: 0.046875 },\n { x: 0.546875, y: 0.046875 },\n { x: 0.578125, y: 0.046875 },\n { x: 0.578125, y: 0.046875 },\n { x: 0.609375, y: 0.046875 },\n { x: 0.609375, y: 0.046875 },\n { x: 0.640625, y: 0.046875 },\n { x: 0.640625, y: 0.046875 },\n { x: 0.671875, y: 0.046875 },\n { x: 0.671875, y: 0.046875 },\n { x: 0.703125, y: 0.046875 },\n { x: 0.703125, y: 0.046875 },\n { x: 0.734375, y: 0.046875 },\n { x: 0.734375, y: 0.046875 },\n { x: 0.765625, y: 0.046875 },\n { x: 0.765625, y: 0.046875 },\n { x: 0.796875, y: 0.046875 },\n { x: 0.796875, y: 0.046875 },\n { x: 0.828125, y: 0.046875 },\n { x: 0.828125, y: 0.046875 },\n { x: 0.859375, y: 0.046875 },\n { x: 0.859375, y: 0.046875 },\n { x: 0.890625, y: 0.046875 },\n { x: 0.890625, y: 0.046875 },\n { x: 0.921875, y: 0.046875 },\n { x: 0.921875, y: 0.046875 },\n { x: 0.953125, y: 0.046875 },\n { x: 0.953125, y: 0.046875 },\n { x: 0.984375, y: 0.046875 },\n { x: 0.984375, y: 0.046875 },\n { x: 0.015625, y: 0.078125 },\n { x: 0.015625, y: 0.078125 },\n { x: 0.046875, y: 0.078125 },\n { x: 0.046875, y: 0.078125 },\n { x: 0.078125, y: 0.078125 },\n { x: 0.078125, y: 0.078125 },\n { x: 0.109375, y: 0.078125 },\n { x: 0.109375, y: 0.078125 },\n { x: 0.140625, y: 0.078125 },\n { x: 0.140625, y: 0.078125 },\n { x: 0.171875, y: 0.078125 },\n { x: 0.171875, y: 0.078125 },\n { x: 0.203125, y: 0.078125 },\n { x: 0.203125, y: 0.078125 },\n { x: 0.234375, y: 0.078125 },\n { x: 0.234375, y: 0.078125 },\n { x: 0.265625, y: 0.078125 },\n { x: 0.265625, y: 0.078125 },\n { x: 0.296875, y: 0.078125 },\n { x: 0.296875, y: 0.078125 },\n { x: 0.328125, y: 0.078125 },\n { x: 0.328125, y: 0.078125 },\n { x: 0.359375, y: 0.078125 },\n { x: 0.359375, y: 0.078125 },\n { x: 0.390625, y: 0.078125 },\n { x: 0.390625, y: 0.078125 },\n { x: 0.421875, y: 0.078125 },\n { x: 0.421875, y: 0.078125 },\n { x: 0.453125, y: 0.078125 },\n { x: 0.453125, y: 0.078125 },\n { x: 0.484375, y: 0.078125 },\n { x: 0.484375, y: 0.078125 },\n { x: 0.515625, y: 0.078125 },\n { x: 0.515625, y: 0.078125 },\n { x: 0.546875, y: 0.078125 },\n { x: 0.546875, y: 0.078125 },\n { x: 0.578125, y: 0.078125 },\n { x: 0.578125, y: 0.078125 },\n { x: 0.609375, y: 0.078125 },\n { x: 0.609375, y: 0.078125 },\n { x: 0.640625, y: 0.078125 },\n { x: 0.640625, y: 0.078125 },\n { x: 0.671875, y: 0.078125 },\n { x: 0.671875, y: 0.078125 },\n { x: 0.703125, y: 0.078125 
},\n { x: 0.703125, y: 0.078125 },\n { x: 0.734375, y: 0.078125 },\n { x: 0.734375, y: 0.078125 },\n { x: 0.765625, y: 0.078125 },\n { x: 0.765625, y: 0.078125 },\n { x: 0.796875, y: 0.078125 },\n { x: 0.796875, y: 0.078125 },\n { x: 0.828125, y: 0.078125 },\n { x: 0.828125, y: 0.078125 },\n { x: 0.859375, y: 0.078125 },\n { x: 0.859375, y: 0.078125 },\n { x: 0.890625, y: 0.078125 },\n { x: 0.890625, y: 0.078125 },\n { x: 0.921875, y: 0.078125 },\n { x: 0.921875, y: 0.078125 },\n { x: 0.953125, y: 0.078125 },\n { x: 0.953125, y: 0.078125 },\n { x: 0.984375, y: 0.078125 },\n { x: 0.984375, y: 0.078125 },\n { x: 0.015625, y: 0.109375 },\n { x: 0.015625, y: 0.109375 },\n { x: 0.046875, y: 0.109375 },\n { x: 0.046875, y: 0.109375 },\n { x: 0.078125, y: 0.109375 },\n { x: 0.078125, y: 0.109375 },\n { x: 0.109375, y: 0.109375 },\n { x: 0.109375, y: 0.109375 },\n { x: 0.140625, y: 0.109375 },\n { x: 0.140625, y: 0.109375 },\n { x: 0.171875, y: 0.109375 },\n { x: 0.171875, y: 0.109375 },\n { x: 0.203125, y: 0.109375 },\n { x: 0.203125, y: 0.109375 },\n { x: 0.234375, y: 0.109375 },\n { x: 0.234375, y: 0.109375 },\n { x: 0.265625, y: 0.109375 },\n { x: 0.265625, y: 0.109375 },\n { x: 0.296875, y: 0.109375 },\n { x: 0.296875, y: 0.109375 },\n { x: 0.328125, y: 0.109375 },\n { x: 0.328125, y: 0.109375 },\n { x: 0.359375, y: 0.109375 },\n { x: 0.359375, y: 0.109375 },\n { x: 0.390625, y: 0.109375 },\n { x: 0.390625, y: 0.109375 },\n { x: 0.421875, y: 0.109375 },\n { x: 0.421875, y: 0.109375 },\n { x: 0.453125, y: 0.109375 },\n { x: 0.453125, y: 0.109375 },\n { x: 0.484375, y: 0.109375 },\n { x: 0.484375, y: 0.109375 },\n { x: 0.515625, y: 0.109375 },\n { x: 0.515625, y: 0.109375 },\n { x: 0.546875, y: 0.109375 },\n { x: 0.546875, y: 0.109375 },\n { x: 0.578125, y: 0.109375 },\n { x: 0.578125, y: 0.109375 },\n { x: 0.609375, y: 0.109375 },\n { x: 0.609375, y: 0.109375 },\n { x: 0.640625, y: 0.109375 },\n { x: 0.640625, y: 0.109375 },\n { x: 0.671875, y: 0.109375 },\n { x: 0.671875, y: 0.109375 },\n { x: 0.703125, y: 0.109375 },\n { x: 0.703125, y: 0.109375 },\n { x: 0.734375, y: 0.109375 },\n { x: 0.734375, y: 0.109375 },\n { x: 0.765625, y: 0.109375 },\n { x: 0.765625, y: 0.109375 },\n { x: 0.796875, y: 0.109375 },\n { x: 0.796875, y: 0.109375 },\n { x: 0.828125, y: 0.109375 },\n { x: 0.828125, y: 0.109375 },\n { x: 0.859375, y: 0.109375 },\n { x: 0.859375, y: 0.109375 },\n { x: 0.890625, y: 0.109375 },\n { x: 0.890625, y: 0.109375 },\n { x: 0.921875, y: 0.109375 },\n { x: 0.921875, y: 0.109375 },\n { x: 0.953125, y: 0.109375 },\n { x: 0.953125, y: 0.109375 },\n { x: 0.984375, y: 0.109375 },\n { x: 0.984375, y: 0.109375 },\n { x: 0.015625, y: 0.140625 },\n { x: 0.015625, y: 0.140625 },\n { x: 0.046875, y: 0.140625 },\n { x: 0.046875, y: 0.140625 },\n { x: 0.078125, y: 0.140625 },\n { x: 0.078125, y: 0.140625 },\n { x: 0.109375, y: 0.140625 },\n { x: 0.109375, y: 0.140625 },\n { x: 0.140625, y: 0.140625 },\n { x: 0.140625, y: 0.140625 },\n { x: 0.171875, y: 0.140625 },\n { x: 0.171875, y: 0.140625 },\n { x: 0.203125, y: 0.140625 },\n { x: 0.203125, y: 0.140625 },\n { x: 0.234375, y: 0.140625 },\n { x: 0.234375, y: 0.140625 },\n { x: 0.265625, y: 0.140625 },\n { x: 0.265625, y: 0.140625 },\n { x: 0.296875, y: 0.140625 },\n { x: 0.296875, y: 0.140625 },\n { x: 0.328125, y: 0.140625 },\n { x: 0.328125, y: 0.140625 },\n { x: 0.359375, y: 0.140625 },\n { x: 0.359375, y: 0.140625 },\n { x: 0.390625, y: 0.140625 },\n { x: 0.390625, y: 0.140625 },\n { x: 0.421875, y: 0.140625 },\n { x: 0.421875, y: 0.140625 
},\n { x: 0.453125, y: 0.140625 },\n { x: 0.453125, y: 0.140625 },\n { x: 0.484375, y: 0.140625 },\n { x: 0.484375, y: 0.140625 },\n { x: 0.515625, y: 0.140625 },\n { x: 0.515625, y: 0.140625 },\n { x: 0.546875, y: 0.140625 },\n { x: 0.546875, y: 0.140625 },\n { x: 0.578125, y: 0.140625 },\n { x: 0.578125, y: 0.140625 },\n { x: 0.609375, y: 0.140625 },\n { x: 0.609375, y: 0.140625 },\n { x: 0.640625, y: 0.140625 },\n { x: 0.640625, y: 0.140625 },\n { x: 0.671875, y: 0.140625 },\n { x: 0.671875, y: 0.140625 },\n { x: 0.703125, y: 0.140625 },\n { x: 0.703125, y: 0.140625 },\n { x: 0.734375, y: 0.140625 },\n { x: 0.734375, y: 0.140625 },\n { x: 0.765625, y: 0.140625 },\n { x: 0.765625, y: 0.140625 },\n { x: 0.796875, y: 0.140625 },\n { x: 0.796875, y: 0.140625 },\n { x: 0.828125, y: 0.140625 },\n { x: 0.828125, y: 0.140625 },\n { x: 0.859375, y: 0.140625 },\n { x: 0.859375, y: 0.140625 },\n { x: 0.890625, y: 0.140625 },\n { x: 0.890625, y: 0.140625 },\n { x: 0.921875, y: 0.140625 },\n { x: 0.921875, y: 0.140625 },\n { x: 0.953125, y: 0.140625 },\n { x: 0.953125, y: 0.140625 },\n { x: 0.984375, y: 0.140625 },\n { x: 0.984375, y: 0.140625 },\n { x: 0.015625, y: 0.171875 },\n { x: 0.015625, y: 0.171875 },\n { x: 0.046875, y: 0.171875 },\n { x: 0.046875, y: 0.171875 },\n { x: 0.078125, y: 0.171875 },\n { x: 0.078125, y: 0.171875 },\n { x: 0.109375, y: 0.171875 },\n { x: 0.109375, y: 0.171875 },\n { x: 0.140625, y: 0.171875 },\n { x: 0.140625, y: 0.171875 },\n { x: 0.171875, y: 0.171875 },\n { x: 0.171875, y: 0.171875 },\n { x: 0.203125, y: 0.171875 },\n { x: 0.203125, y: 0.171875 },\n { x: 0.234375, y: 0.171875 },\n { x: 0.234375, y: 0.171875 },\n { x: 0.265625, y: 0.171875 },\n { x: 0.265625, y: 0.171875 },\n { x: 0.296875, y: 0.171875 },\n { x: 0.296875, y: 0.171875 },\n { x: 0.328125, y: 0.171875 },\n { x: 0.328125, y: 0.171875 },\n { x: 0.359375, y: 0.171875 },\n { x: 0.359375, y: 0.171875 },\n { x: 0.390625, y: 0.171875 },\n { x: 0.390625, y: 0.171875 },\n { x: 0.421875, y: 0.171875 },\n { x: 0.421875, y: 0.171875 },\n { x: 0.453125, y: 0.171875 },\n { x: 0.453125, y: 0.171875 },\n { x: 0.484375, y: 0.171875 },\n { x: 0.484375, y: 0.171875 },\n { x: 0.515625, y: 0.171875 },\n { x: 0.515625, y: 0.171875 },\n { x: 0.546875, y: 0.171875 },\n { x: 0.546875, y: 0.171875 },\n { x: 0.578125, y: 0.171875 },\n { x: 0.578125, y: 0.171875 },\n { x: 0.609375, y: 0.171875 },\n { x: 0.609375, y: 0.171875 },\n { x: 0.640625, y: 0.171875 },\n { x: 0.640625, y: 0.171875 },\n { x: 0.671875, y: 0.171875 },\n { x: 0.671875, y: 0.171875 },\n { x: 0.703125, y: 0.171875 },\n { x: 0.703125, y: 0.171875 },\n { x: 0.734375, y: 0.171875 },\n { x: 0.734375, y: 0.171875 },\n { x: 0.765625, y: 0.171875 },\n { x: 0.765625, y: 0.171875 },\n { x: 0.796875, y: 0.171875 },\n { x: 0.796875, y: 0.171875 },\n { x: 0.828125, y: 0.171875 },\n { x: 0.828125, y: 0.171875 },\n { x: 0.859375, y: 0.171875 },\n { x: 0.859375, y: 0.171875 },\n { x: 0.890625, y: 0.171875 },\n { x: 0.890625, y: 0.171875 },\n { x: 0.921875, y: 0.171875 },\n { x: 0.921875, y: 0.171875 },\n { x: 0.953125, y: 0.171875 },\n { x: 0.953125, y: 0.171875 },\n { x: 0.984375, y: 0.171875 },\n { x: 0.984375, y: 0.171875 },\n { x: 0.015625, y: 0.203125 },\n { x: 0.015625, y: 0.203125 },\n { x: 0.046875, y: 0.203125 },\n { x: 0.046875, y: 0.203125 },\n { x: 0.078125, y: 0.203125 },\n { x: 0.078125, y: 0.203125 },\n { x: 0.109375, y: 0.203125 },\n { x: 0.109375, y: 0.203125 },\n { x: 0.140625, y: 0.203125 },\n { x: 0.140625, y: 0.203125 },\n { x: 0.171875, y: 0.203125 
[auto-generated anchor coordinate data, elided for readability]

The elided entries enumerate normalized anchor centers `{ x, y }` in row-major order (x varies fastest) over three square grids, with each center duplicated once per anchor assigned to that cell:

- 32x32 grid, 2 anchors per cell — centers at `(i + 0.5) / 32`: 0.015625, 0.046875, ..., 0.984375
- 16x16 grid, 2 anchors per cell — centers at `(i + 0.5) / 16`: 0.03125, 0.09375, ..., 0.96875
- 8x8 grid, 6 anchors per cell — centers at `(i + 0.5) / 8`: 0.0625, 0.1875, ..., 0.9375
y: 0.5625 },\n { x: 0.6875, y: 0.5625 },\n { x: 0.8125, y: 0.5625 },\n { x: 0.8125, y: 0.5625 },\n { x: 0.8125, y: 0.5625 },\n { x: 0.8125, y: 0.5625 },\n { x: 0.8125, y: 0.5625 },\n { x: 0.8125, y: 0.5625 },\n { x: 0.9375, y: 0.5625 },\n { x: 0.9375, y: 0.5625 },\n { x: 0.9375, y: 0.5625 },\n { x: 0.9375, y: 0.5625 },\n { x: 0.9375, y: 0.5625 },\n { x: 0.9375, y: 0.5625 },\n { x: 0.0625, y: 0.6875 },\n { x: 0.0625, y: 0.6875 },\n { x: 0.0625, y: 0.6875 },\n { x: 0.0625, y: 0.6875 },\n { x: 0.0625, y: 0.6875 },\n { x: 0.0625, y: 0.6875 },\n { x: 0.1875, y: 0.6875 },\n { x: 0.1875, y: 0.6875 },\n { x: 0.1875, y: 0.6875 },\n { x: 0.1875, y: 0.6875 },\n { x: 0.1875, y: 0.6875 },\n { x: 0.1875, y: 0.6875 },\n { x: 0.3125, y: 0.6875 },\n { x: 0.3125, y: 0.6875 },\n { x: 0.3125, y: 0.6875 },\n { x: 0.3125, y: 0.6875 },\n { x: 0.3125, y: 0.6875 },\n { x: 0.3125, y: 0.6875 },\n { x: 0.4375, y: 0.6875 },\n { x: 0.4375, y: 0.6875 },\n { x: 0.4375, y: 0.6875 },\n { x: 0.4375, y: 0.6875 },\n { x: 0.4375, y: 0.6875 },\n { x: 0.4375, y: 0.6875 },\n { x: 0.5625, y: 0.6875 },\n { x: 0.5625, y: 0.6875 },\n { x: 0.5625, y: 0.6875 },\n { x: 0.5625, y: 0.6875 },\n { x: 0.5625, y: 0.6875 },\n { x: 0.5625, y: 0.6875 },\n { x: 0.6875, y: 0.6875 },\n { x: 0.6875, y: 0.6875 },\n { x: 0.6875, y: 0.6875 },\n { x: 0.6875, y: 0.6875 },\n { x: 0.6875, y: 0.6875 },\n { x: 0.6875, y: 0.6875 },\n { x: 0.8125, y: 0.6875 },\n { x: 0.8125, y: 0.6875 },\n { x: 0.8125, y: 0.6875 },\n { x: 0.8125, y: 0.6875 },\n { x: 0.8125, y: 0.6875 },\n { x: 0.8125, y: 0.6875 },\n { x: 0.9375, y: 0.6875 },\n { x: 0.9375, y: 0.6875 },\n { x: 0.9375, y: 0.6875 },\n { x: 0.9375, y: 0.6875 },\n { x: 0.9375, y: 0.6875 },\n { x: 0.9375, y: 0.6875 },\n { x: 0.0625, y: 0.8125 },\n { x: 0.0625, y: 0.8125 },\n { x: 0.0625, y: 0.8125 },\n { x: 0.0625, y: 0.8125 },\n { x: 0.0625, y: 0.8125 },\n { x: 0.0625, y: 0.8125 },\n { x: 0.1875, y: 0.8125 },\n { x: 0.1875, y: 0.8125 },\n { x: 0.1875, y: 0.8125 },\n { x: 0.1875, y: 0.8125 },\n { x: 0.1875, y: 0.8125 },\n { x: 0.1875, y: 0.8125 },\n { x: 0.3125, y: 0.8125 },\n { x: 0.3125, y: 0.8125 },\n { x: 0.3125, y: 0.8125 },\n { x: 0.3125, y: 0.8125 },\n { x: 0.3125, y: 0.8125 },\n { x: 0.3125, y: 0.8125 },\n { x: 0.4375, y: 0.8125 },\n { x: 0.4375, y: 0.8125 },\n { x: 0.4375, y: 0.8125 },\n { x: 0.4375, y: 0.8125 },\n { x: 0.4375, y: 0.8125 },\n { x: 0.4375, y: 0.8125 },\n { x: 0.5625, y: 0.8125 },\n { x: 0.5625, y: 0.8125 },\n { x: 0.5625, y: 0.8125 },\n { x: 0.5625, y: 0.8125 },\n { x: 0.5625, y: 0.8125 },\n { x: 0.5625, y: 0.8125 },\n { x: 0.6875, y: 0.8125 },\n { x: 0.6875, y: 0.8125 },\n { x: 0.6875, y: 0.8125 },\n { x: 0.6875, y: 0.8125 },\n { x: 0.6875, y: 0.8125 },\n { x: 0.6875, y: 0.8125 },\n { x: 0.8125, y: 0.8125 },\n { x: 0.8125, y: 0.8125 },\n { x: 0.8125, y: 0.8125 },\n { x: 0.8125, y: 0.8125 },\n { x: 0.8125, y: 0.8125 },\n { x: 0.8125, y: 0.8125 },\n { x: 0.9375, y: 0.8125 },\n { x: 0.9375, y: 0.8125 },\n { x: 0.9375, y: 0.8125 },\n { x: 0.9375, y: 0.8125 },\n { x: 0.9375, y: 0.8125 },\n { x: 0.9375, y: 0.8125 },\n { x: 0.0625, y: 0.9375 },\n { x: 0.0625, y: 0.9375 },\n { x: 0.0625, y: 0.9375 },\n { x: 0.0625, y: 0.9375 },\n { x: 0.0625, y: 0.9375 },\n { x: 0.0625, y: 0.9375 },\n { x: 0.1875, y: 0.9375 },\n { x: 0.1875, y: 0.9375 },\n { x: 0.1875, y: 0.9375 },\n { x: 0.1875, y: 0.9375 },\n { x: 0.1875, y: 0.9375 },\n { x: 0.1875, y: 0.9375 },\n { x: 0.3125, y: 0.9375 },\n { x: 0.3125, y: 0.9375 },\n { x: 0.3125, y: 0.9375 },\n { x: 0.3125, y: 0.9375 },\n { x: 0.3125, y: 0.9375 },\n { x: 
0.3125, y: 0.9375 },\n { x: 0.4375, y: 0.9375 },\n { x: 0.4375, y: 0.9375 },\n { x: 0.4375, y: 0.9375 },\n { x: 0.4375, y: 0.9375 },\n { x: 0.4375, y: 0.9375 },\n { x: 0.4375, y: 0.9375 },\n { x: 0.5625, y: 0.9375 },\n { x: 0.5625, y: 0.9375 },\n { x: 0.5625, y: 0.9375 },\n { x: 0.5625, y: 0.9375 },\n { x: 0.5625, y: 0.9375 },\n { x: 0.5625, y: 0.9375 },\n { x: 0.6875, y: 0.9375 },\n { x: 0.6875, y: 0.9375 },\n { x: 0.6875, y: 0.9375 },\n { x: 0.6875, y: 0.9375 },\n { x: 0.6875, y: 0.9375 },\n { x: 0.6875, y: 0.9375 },\n { x: 0.8125, y: 0.9375 },\n { x: 0.8125, y: 0.9375 },\n { x: 0.8125, y: 0.9375 },\n { x: 0.8125, y: 0.9375 },\n { x: 0.8125, y: 0.9375 },\n { x: 0.8125, y: 0.9375 },\n { x: 0.9375, y: 0.9375 },\n { x: 0.9375, y: 0.9375 },\n { x: 0.9375, y: 0.9375 },\n { x: 0.9375, y: 0.9375 },\n { x: 0.9375, y: 0.9375 },\n { x: 0.9375, y: 0.9375 },\n];\n", "/**\n * HandPose model implementation\n * See `handpose.ts` for entry point\n */\n\nimport * as tf from '../../dist/tfjs.esm.js';\nimport * as util from './handposeutil';\nimport * as anchors from './handposeanchors';\nimport type { Tensor, GraphModel } from '../tfjs/types';\nimport type { Point } from '../result';\n\nexport class HandDetector {\n model: GraphModel;\n anchors: number[][];\n anchorsTensor: Tensor;\n inputSize: number;\n inputSizeTensor: Tensor;\n doubleInputSizeTensor: Tensor;\n\n constructor(model) {\n this.model = model;\n this.anchors = anchors.anchors.map((anchor) => [anchor.x, anchor.y]);\n this.anchorsTensor = tf.tensor2d(this.anchors);\n this.inputSize = (this.model && this.model.inputs && this.model.inputs[0].shape) ? this.model.inputs[0].shape[2] : 0;\n this.inputSizeTensor = tf.tensor1d([this.inputSize, this.inputSize]);\n this.doubleInputSizeTensor = tf.tensor1d([this.inputSize * 2, this.inputSize * 2]);\n }\n\n normalizeBoxes(boxes) {\n const t: Record = {};\n t.boxOffsets = tf.slice(boxes, [0, 0], [-1, 2]);\n t.boxSizes = tf.slice(boxes, [0, 2], [-1, 2]);\n t.div = tf.div(t.boxOffsets, this.inputSizeTensor);\n t.boxCenterPoints = tf.add(t.div, this.anchorsTensor);\n t.halfBoxSizes = tf.div(t.boxSizes, this.doubleInputSizeTensor);\n t.sub = tf.sub(t.boxCenterPoints, t.halfBoxSizes);\n t.startPoints = tf.mul(t.sub, this.inputSizeTensor);\n t.add = tf.add(t.boxCenterPoints, t.halfBoxSizes);\n t.endPoints = tf.mul(t.add, this.inputSizeTensor);\n const res = tf.concat2d([t.startPoints, t.endPoints], 1);\n Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));\n return res;\n }\n\n normalizeLandmarks(rawPalmLandmarks, index) {\n const t: Record = {};\n t.reshape = tf.reshape(rawPalmLandmarks, [-1, 7, 2]);\n t.div = tf.div(t.reshape, this.inputSizeTensor);\n t.landmarks = tf.add(t.div, this.anchors[index]);\n const res = tf.mul(t.landmarks, this.inputSizeTensor);\n Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));\n return res;\n }\n\n async predict(input, config): Promise<{ startPoint: Point; endPoint: Point, palmLandmarks: Point[]; confidence: number }[]> {\n const t: Record = {};\n t.resize = tf.image.resizeBilinear(input, [this.inputSize, this.inputSize]);\n t.div = tf.div(t.resize, 127.5);\n t.image = tf.sub(t.div, 1);\n t.batched = this.model.execute(t.image) as Tensor;\n t.predictions = tf.squeeze(t.batched);\n t.slice = tf.slice(t.predictions, [0, 0], [-1, 1]);\n t.sigmoid = tf.sigmoid(t.slice);\n t.scores = tf.squeeze(t.sigmoid);\n const scores = await t.scores.data();\n t.boxes = tf.slice(t.predictions, [0, 1], [-1, 4]);\n t.norm = this.normalizeBoxes(t.boxes);\n // box detection is flaky so 
we request 3x more boxes than we need\n    t.nms = await tf.image.nonMaxSuppressionAsync(t.norm, t.scores, 3 * config.hand.maxDetected, config.hand.iouThreshold, config.hand.minConfidence);\n    const nms = await t.nms.array() as Array<number>;\n    const hands: Array<{ startPoint: Point; endPoint: Point; palmLandmarks: Point[]; confidence: number }> = [];\n    for (const index of nms) {\n      const p: Record<string, Tensor> = {};\n      p.box = tf.slice(t.norm, [index, 0], [1, -1]);\n      p.slice = tf.slice(t.predictions, [index, 5], [1, 14]);\n      p.norm = this.normalizeLandmarks(p.slice, index);\n      p.palmLandmarks = tf.reshape(p.norm, [-1, 2]);\n      const box = await p.box.data();\n      const startPoint = box.slice(0, 2) as unknown as Point;\n      const endPoint = box.slice(2, 4) as unknown as Point;\n      const palmLandmarks = await p.palmLandmarks.array();\n      const hand = { startPoint, endPoint, palmLandmarks, confidence: scores[index] };\n      const scaled = util.scaleBoxCoordinates(hand, [input.shape[2] / this.inputSize, input.shape[1] / this.inputSize]);\n      hands.push(scaled);\n      Object.keys(p).forEach((tensor) => tf.dispose(p[tensor]));\n    }\n    Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));\n    return hands;\n  }\n}\n",
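For reference, the anchor decode that normalizeBoxes expresses in tensor ops can be written out for a single box in plain TypeScript. This is an illustrative sketch only; the decodeBox helper and the Vec2 alias are assumptions of the example, not part of the library:

type Vec2 = [number, number];
// single-box equivalent of the tensor math in normalizeBoxes:
// offsets are relative to an anchor center, sizes are in pixels of the detector input square
function decodeBox(offset: Vec2, size: Vec2, anchor: Vec2, inputSize: number): { start: Vec2; end: Vec2 } {
  const center: Vec2 = [offset[0] / inputSize + anchor[0], offset[1] / inputSize + anchor[1]]; // normalized box center
  const half: Vec2 = [size[0] / (2 * inputSize), size[1] / (2 * inputSize)]; // normalized half-extent
  return {
    start: [(center[0] - half[0]) * inputSize, (center[1] - half[1]) * inputSize],
    end: [(center[0] + half[0]) * inputSize, (center[1] + half[1]) * inputSize],
  };
}
// example: zero offset from anchor (0.5, 0.5) on a 256px input with a 64px box:
// decodeBox([0, 0], [64, 64], [0.5, 0.5], 256) -> { start: [96, 96], end: [160, 160] }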
"/**\n * HandPose model implementation\n * See `handpose.ts` for entry point\n */\n\nimport * as tf from '../../dist/tfjs.esm.js';\nimport * as util from './handposeutil';\nimport type * as detector from './handposedetector';\nimport type { Tensor, GraphModel } from '../tfjs/types';\nimport { env } from '../util/env';\nimport { now } from '../util/util';\nimport type { Point } from '../result';\n\nconst palmBoxEnlargeFactor = 5; // default 3\nconst handBoxEnlargeFactor = 1.65; // default 1.65\nconst palmLandmarkIds = [0, 5, 9, 13, 17, 1, 2];\nconst palmLandmarksPalmBase = 0;\nconst palmLandmarksMiddleFingerBase = 2;\nlet lastTime = 0;\n\nexport class HandPipeline {\n  handDetector: detector.HandDetector;\n  handPoseModel: GraphModel;\n  inputSize: number;\n  storedBoxes: Array<{ startPoint: Point; endPoint: Point; palmLandmarks: Point[]; confidence: number } | null>;\n  skipped: number;\n  detectedHands: number;\n\n  constructor(handDetector, handPoseModel) {\n    this.handDetector = handDetector;\n    this.handPoseModel = handPoseModel;\n    this.inputSize = this.handPoseModel && this.handPoseModel.inputs[0].shape ? this.handPoseModel.inputs[0].shape[2] : 0;\n    this.storedBoxes = [];\n    this.skipped = Number.MAX_SAFE_INTEGER;\n    this.detectedHands = 0;\n  }\n\n  // eslint-disable-next-line class-methods-use-this\n  calculateLandmarksBoundingBox(landmarks) {\n    const xs = landmarks.map((d) => d[0]);\n    const ys = landmarks.map((d) => d[1]);\n    const startPoint = [Math.min(...xs), Math.min(...ys)];\n    const endPoint = [Math.max(...xs), Math.max(...ys)];\n    return { startPoint, endPoint };\n  }\n\n  getBoxForPalmLandmarks(palmLandmarks, rotationMatrix) {\n    const rotatedPalmLandmarks = palmLandmarks.map((coord) => util.rotatePoint([...coord, 1], rotationMatrix));\n    const boxAroundPalm = this.calculateLandmarksBoundingBox(rotatedPalmLandmarks);\n    return util.enlargeBox(util.squarifyBox(boxAroundPalm), palmBoxEnlargeFactor);\n  }\n\n  getBoxForHandLandmarks(landmarks) {\n    const boundingBox = this.calculateLandmarksBoundingBox(landmarks);\n    const boxAroundHand = util.enlargeBox(util.squarifyBox(boundingBox), handBoxEnlargeFactor);\n    boxAroundHand.palmLandmarks = [];\n    for (let i = 0; i < palmLandmarkIds.length; i++) {\n      boxAroundHand.palmLandmarks.push(landmarks[palmLandmarkIds[i]].slice(0, 2));\n    }\n    return boxAroundHand;\n  }\n\n  transformRawCoords(rawCoords, box2, angle, rotationMatrix) {\n    const boxSize = util.getBoxSize(box2);\n    const scaleFactor = [boxSize[0] / this.inputSize, boxSize[1] / this.inputSize, (boxSize[0] + boxSize[1]) / this.inputSize / 2];\n    const coordsScaled = rawCoords.map((coord) => [\n      scaleFactor[0] * (coord[0] - this.inputSize / 2),\n      scaleFactor[1] * (coord[1] - this.inputSize / 2),\n      scaleFactor[2] * coord[2],\n    ]);\n    const coordsRotationMatrix = util.buildRotationMatrix(angle, [0, 0]);\n    const coordsRotated = coordsScaled.map((coord) => {\n      const rotated = util.rotatePoint(coord, coordsRotationMatrix);\n      return [...rotated, coord[2]];\n    });\n    const inverseRotationMatrix = util.invertTransformMatrix(rotationMatrix);\n    const boxCenter = [...util.getBoxCenter(box2), 1];\n    const originalBoxCenter = [\n      util.dot(boxCenter, inverseRotationMatrix[0]),\n      util.dot(boxCenter, inverseRotationMatrix[1]),\n    ];\n    return coordsRotated.map((coord) => [\n      Math.trunc(coord[0] + originalBoxCenter[0]),\n      Math.trunc(coord[1] + originalBoxCenter[1]),\n      Math.trunc(coord[2]),\n    ]);\n  }\n\n  async estimateHands(image, config) {\n    let useFreshBox = false;\n\n    // run new detector only when caching is not allowed or the skip time/frame window has expired\n    let boxes;\n    const skipTime = (config.hand.skipTime || 0) > (now() - lastTime);\n    const skipFrame = this.skipped < (config.hand.skipFrames || 0);\n    if (!config.skipAllowed || !skipTime || !skipFrame) {\n      boxes = await this.handDetector.predict(image, config);\n      this.skipped = 0;\n    }\n    if (config.skipAllowed) this.skipped++;\n\n    // if detector result count doesn't match current working set, use it to reset current working set\n    if (boxes && (boxes.length > 0) && ((boxes.length !== this.detectedHands) && (this.detectedHands !== config.hand.maxDetected) || !config.hand.landmarks)) {\n      this.detectedHands = 0;\n      this.storedBoxes = [...boxes];\n      // for (const possible of boxes) this.storedBoxes.push(possible);\n      if (this.storedBoxes.length > 0) useFreshBox = true;\n    }\n    const hands: Array<{ landmarks: Point[], confidence: number, boxConfidence: number, fingerConfidence: number, box: { topLeft: Point, bottomRight: Point } }> = [];\n\n    // go through working set of boxes\n    for (let i = 0; i < this.storedBoxes.length; i++) {\n      const currentBox = this.storedBoxes[i];\n      if (!currentBox) continue;\n      if (config.hand.landmarks) {\n        const angle = 
config.hand.rotation ? util.computeRotation(currentBox.palmLandmarks[palmLandmarksPalmBase], currentBox.palmLandmarks[palmLandmarksMiddleFingerBase]) : 0;\n const palmCenter = util.getBoxCenter(currentBox);\n const palmCenterNormalized = [palmCenter[0] / image.shape[2], palmCenter[1] / image.shape[1]];\n const rotatedImage = config.hand.rotation && env.kernels.includes('rotatewithoffset') ? tf.image.rotateWithOffset(image, angle, 0, palmCenterNormalized) : image.clone();\n const rotationMatrix = util.buildRotationMatrix(-angle, palmCenter);\n const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;\n const croppedInput = util.cutBoxFromImageAndResize(newBox, rotatedImage, [this.inputSize, this.inputSize]);\n const handImage = tf.div(croppedInput, 255);\n tf.dispose(croppedInput);\n tf.dispose(rotatedImage);\n const [confidenceT, keypoints] = this.handPoseModel.execute(handImage) as Array;\n lastTime = now();\n tf.dispose(handImage);\n const confidence = (await confidenceT.data())[0];\n tf.dispose(confidenceT);\n if (confidence >= config.hand.minConfidence / 4) {\n const keypointsReshaped = tf.reshape(keypoints, [-1, 3]);\n const rawCoords = await keypointsReshaped.array();\n tf.dispose(keypoints);\n tf.dispose(keypointsReshaped);\n const coords = this.transformRawCoords(rawCoords, newBox, angle, rotationMatrix);\n const nextBoundingBox = this.getBoxForHandLandmarks(coords);\n this.storedBoxes[i] = { ...nextBoundingBox, confidence };\n const result = {\n landmarks: coords,\n confidence,\n boxConfidence: currentBox.confidence,\n fingerConfidence: confidence,\n box: { topLeft: nextBoundingBox.startPoint, bottomRight: nextBoundingBox.endPoint },\n };\n hands.push(result);\n } else {\n this.storedBoxes[i] = null;\n }\n tf.dispose(keypoints);\n } else {\n // const enlarged = box.enlargeBox(box.squarifyBox(box.shiftBox(currentBox, HAND_BOX_SHIFT_VECTOR)), handBoxEnlargeFactor);\n const enlarged = util.enlargeBox(util.squarifyBox(currentBox), handBoxEnlargeFactor);\n const result = {\n confidence: currentBox.confidence,\n boxConfidence: currentBox.confidence,\n fingerConfidence: 0,\n box: { topLeft: enlarged.startPoint, bottomRight: enlarged.endPoint },\n landmarks: [],\n };\n hands.push(result);\n }\n }\n this.storedBoxes = this.storedBoxes.filter((a) => a !== null);\n this.detectedHands = hands.length;\n if (hands.length > config.hand.maxDetected) hands.length = config.hand.maxDetected;\n return hands;\n }\n}\n", "/**\n * FingerPose algorithm implementation\n * See `fingerpose.ts` for entry point\n */\n\nexport const Finger = {\n thumb: 0,\n index: 1,\n middle: 2,\n ring: 3,\n pinky: 4,\n all: [0, 1, 2, 3, 4], // just for convenience\n nameMapping: { 0: 'thumb', 1: 'index', 2: 'middle', 3: 'ring', 4: 'pinky' },\n // Describes mapping of joints based on the 21 points returned by handpose.\n // [0] Palm\n // [1-4] Thumb\n // [5-8] Index\n // [9-12] Middle\n // [13-16] Ring\n // [17-20] Pinky\n pointsMapping: {\n 0: [[0, 1], [1, 2], [2, 3], [3, 4]],\n 1: [[0, 5], [5, 6], [6, 7], [7, 8]],\n 2: [[0, 9], [9, 10], [10, 11], [11, 12]],\n 3: [[0, 13], [13, 14], [14, 15], [15, 16]],\n 4: [[0, 17], [17, 18], [18, 19], [19, 20]],\n },\n getName: (value) => Finger.nameMapping[value],\n getPoints: (value) => Finger.pointsMapping[value],\n};\n\nexport const FingerCurl = {\n none: 0,\n half: 1,\n full: 2,\n nameMapping: { 0: 'none', 1: 'half', 2: 'full' },\n getName: (value) => FingerCurl.nameMapping[value],\n};\n\nexport const FingerDirection = {\n 
verticalUp: 0,\n verticalDown: 1,\n horizontalLeft: 2,\n horizontalRight: 3,\n diagonalUpRight: 4,\n diagonalUpLeft: 5,\n diagonalDownRight: 6,\n diagonalDownLeft: 7,\n nameMapping: { 0: 'verticalUp', 1: 'verticalDown', 2: 'horizontalLeft', 3: 'horizontalRight', 4: 'diagonalUpRight', 5: 'diagonalUpLeft', 6: 'diagonalDownRight', 7: 'diagonalDownLeft' },\n getName: (value) => FingerDirection.nameMapping[value],\n};\n\nexport class FingerGesture {\n name;\n curls;\n directions;\n weights;\n weightsRelative;\n\n constructor(name) {\n // name (should be unique)\n this.name = name;\n this.curls = {};\n this.directions = {};\n this.weights = [1.0, 1.0, 1.0, 1.0, 1.0];\n this.weightsRelative = [1.0, 1.0, 1.0, 1.0, 1.0];\n }\n\n addCurl(finger, curl, confidence) {\n if (typeof this.curls[finger] === 'undefined') this.curls[finger] = [];\n this.curls[finger].push([curl, confidence]);\n }\n\n addDirection(finger, position, confidence) {\n if (!this.directions[finger]) this.directions[finger] = [];\n this.directions[finger].push([position, confidence]);\n }\n\n setWeight(finger, weight) {\n this.weights[finger] = weight;\n // recalculate relative weights\n const total = this.weights.reduce((a, b) => a + b, 0);\n this.weightsRelative = this.weights.map((el) => el * 5 / total);\n }\n\n matchAgainst(detectedCurls, detectedDirections) {\n let confidence = 0.0;\n // look at the detected curl of each finger and compare with\n // the expected curl of this finger inside current gesture\n for (const fingerIdx in detectedCurls) {\n const detectedCurl = detectedCurls[fingerIdx];\n const expectedCurls = this.curls[fingerIdx];\n if (typeof expectedCurls === 'undefined') {\n // no curl description available for this finger\n // add default confidence of \"1\"\n confidence += this.weightsRelative[fingerIdx];\n continue;\n }\n // compare to each possible curl of this specific finger\n for (const [expectedCurl, score] of expectedCurls) {\n if (detectedCurl === expectedCurl) {\n confidence += score * this.weightsRelative[fingerIdx];\n break;\n }\n }\n }\n // same for detected direction of each finger\n for (const fingerIdx in detectedDirections) {\n const detectedDirection = detectedDirections[fingerIdx];\n const expectedDirections = this.directions[fingerIdx];\n if (typeof expectedDirections === 'undefined') {\n // no direction description available for this finger\n // add default confidence of \"1\"\n confidence += this.weightsRelative[fingerIdx];\n continue;\n }\n // compare to each possible direction of this specific finger\n for (const [expectedDirection, score] of expectedDirections) {\n if (detectedDirection === expectedDirection) {\n confidence += score * this.weightsRelative[fingerIdx];\n break;\n }\n }\n }\n return confidence / 10;\n }\n}\n", "/**\n * FingerPose algorithm implementation\n * See `fingerpose.ts` for entry point\n */\n\nimport { Finger, FingerCurl, FingerDirection, FingerGesture } from './fingerdef';\n\n// describe thumbs up gesture \uD83D\uDC4D\nconst ThumbsUp = new FingerGesture('thumbs up');\nThumbsUp.addCurl(Finger.thumb, FingerCurl.none, 1.0);\nThumbsUp.addDirection(Finger.thumb, FingerDirection.verticalUp, 1.0);\nThumbsUp.addDirection(Finger.thumb, FingerDirection.diagonalUpLeft, 0.25);\nThumbsUp.addDirection(Finger.thumb, FingerDirection.diagonalUpRight, 0.25);\nfor (const finger of [Finger.index, Finger.middle, Finger.ring, Finger.pinky]) {\n ThumbsUp.addCurl(finger, FingerCurl.full, 1.0);\n ThumbsUp.addDirection(finger, FingerDirection.horizontalLeft, 1.0);\n 
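// both horizontal directions score fully because a mirrored (left vs right) hand flips which way the curled fingers point; for this gesture the curl, not the direction, is the discriminating signal\n  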
ThumbsUp.addDirection(finger, FingerDirection.horizontalRight, 1.0);\n}\n\n// describe Victory gesture \u270C\uFE0F\nconst Victory = new FingerGesture('victory');\nVictory.addCurl(Finger.thumb, FingerCurl.half, 0.5);\nVictory.addCurl(Finger.thumb, FingerCurl.none, 0.5);\nVictory.addDirection(Finger.thumb, FingerDirection.verticalUp, 1.0);\nVictory.addDirection(Finger.thumb, FingerDirection.diagonalUpLeft, 1.0);\nVictory.addCurl(Finger.index, FingerCurl.none, 1.0);\nVictory.addDirection(Finger.index, FingerDirection.verticalUp, 0.75);\nVictory.addDirection(Finger.index, FingerDirection.diagonalUpLeft, 1.0);\nVictory.addCurl(Finger.middle, FingerCurl.none, 1.0);\nVictory.addDirection(Finger.middle, FingerDirection.verticalUp, 1.0);\nVictory.addDirection(Finger.middle, FingerDirection.diagonalUpLeft, 0.75);\nVictory.addCurl(Finger.ring, FingerCurl.full, 1.0);\nVictory.addDirection(Finger.ring, FingerDirection.verticalUp, 0.2);\nVictory.addDirection(Finger.ring, FingerDirection.diagonalUpLeft, 1.0);\nVictory.addDirection(Finger.ring, FingerDirection.horizontalLeft, 0.2);\nVictory.addCurl(Finger.pinky, FingerCurl.full, 1.0);\nVictory.addDirection(Finger.pinky, FingerDirection.verticalUp, 0.2);\nVictory.addDirection(Finger.pinky, FingerDirection.diagonalUpLeft, 1.0);\nVictory.addDirection(Finger.pinky, FingerDirection.horizontalLeft, 0.2);\nVictory.setWeight(Finger.index, 2);\nVictory.setWeight(Finger.middle, 2);\n\nexport default [ThumbsUp, Victory];\n", "/**\n * FingerPose algorithm implementation constants\n *\n * Based on: [**FingerPose**](https://github.com/andypotato/fingerpose)\n */\n\nimport { Finger, FingerCurl, FingerDirection } from './fingerdef';\nimport Gestures from '../hand/fingergesture';\n\nconst minConfidence = 0.7;\nconst options = {\n  // curl estimation\n  HALF_CURL_START_LIMIT: 60.0,\n  NO_CURL_START_LIMIT: 130.0,\n  // direction estimation\n  DISTANCE_VOTE_POWER: 1.1,\n  SINGLE_ANGLE_VOTE_POWER: 0.9,\n  TOTAL_ANGLE_VOTE_POWER: 1.6,\n};\n\nfunction calculateSlope(point1x, point1y, point2x, point2y) {\n  const value = (point1y - point2y) / (point1x - point2x);\n  let slope = Math.atan(value) * 180 / Math.PI;\n  if (slope <= 0) slope = -slope;\n  else slope = 180 - slope;\n  return slope;\n}\n\n// point1, point2 are 2d or 3d point arrays (xy[z])\n// returns either a single scalar (2d) or array of two slopes (3d)\nfunction getSlopes(point1, point2) {\n  if (!point1 || !point2) return [0, 0];\n  const slopeXY = calculateSlope(point1[0], point1[1], point2[0], point2[1]);\n  if (point1.length === 2) return slopeXY;\n  const slopeYZ = calculateSlope(point1[1], point1[2], point2[1], point2[2]);\n  return [slopeXY, slopeYZ];\n}\n\nfunction angleOrientationAt(angle, weightageAt = 1.0) {\n  let isVertical = 0;\n  let isDiagonal = 0;\n  let isHorizontal = 0;\n  if (angle >= 75.0 && angle <= 105.0) isVertical = 1 * weightageAt;\n  else if (angle >= 25.0 && angle <= 155.0) isDiagonal = 1 * weightageAt;\n  else isHorizontal = 1 * weightageAt;\n  return [isVertical, isDiagonal, isHorizontal];\n}\n\nfunction estimateFingerCurl(startPoint, midPoint, endPoint) {\n  const start_mid_x_dist = startPoint[0] - midPoint[0];\n  const start_end_x_dist = startPoint[0] - endPoint[0];\n  const mid_end_x_dist = midPoint[0] - endPoint[0];\n  const start_mid_y_dist = startPoint[1] - midPoint[1];\n  const start_end_y_dist = startPoint[1] - endPoint[1];\n  const mid_end_y_dist = midPoint[1] - endPoint[1];\n  const start_mid_z_dist = startPoint[2] - midPoint[2];\n  const start_end_z_dist = startPoint[2] - endPoint[2];\n  
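// the segment lengths computed below feed the law of cosines at the mid joint: cos(angle) = (mid_end_dist^2 + start_mid_dist^2 - start_end_dist^2) / (2 * mid_end_dist * start_mid_dist); a near-straight finger gives an angle close to 180 degrees (curl none) while a folded fingertip gives a small angle (curl full)\n  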
const mid_end_z_dist = midPoint[2] - endPoint[2];\n const start_mid_dist = Math.sqrt(start_mid_x_dist * start_mid_x_dist + start_mid_y_dist * start_mid_y_dist + start_mid_z_dist * start_mid_z_dist);\n const start_end_dist = Math.sqrt(start_end_x_dist * start_end_x_dist + start_end_y_dist * start_end_y_dist + start_end_z_dist * start_end_z_dist);\n const mid_end_dist = Math.sqrt(mid_end_x_dist * mid_end_x_dist + mid_end_y_dist * mid_end_y_dist + mid_end_z_dist * mid_end_z_dist);\n let cos_in = (mid_end_dist * mid_end_dist + start_mid_dist * start_mid_dist - start_end_dist * start_end_dist) / (2 * mid_end_dist * start_mid_dist);\n if (cos_in > 1.0) cos_in = 1.0;\n else if (cos_in < -1.0) cos_in = -1.0;\n let angleOfCurve = Math.acos(cos_in);\n angleOfCurve = (57.2958 * angleOfCurve) % 180;\n let fingerCurl;\n if (angleOfCurve > options.NO_CURL_START_LIMIT) fingerCurl = FingerCurl.none;\n else if (angleOfCurve > options.HALF_CURL_START_LIMIT) fingerCurl = FingerCurl.half;\n else fingerCurl = FingerCurl.full;\n return fingerCurl;\n}\n\nfunction estimateHorizontalDirection(start_end_x_dist, start_mid_x_dist, mid_end_x_dist, max_dist_x) {\n let estimatedDirection;\n if (max_dist_x === Math.abs(start_end_x_dist)) {\n if (start_end_x_dist > 0) estimatedDirection = FingerDirection.horizontalLeft;\n else estimatedDirection = FingerDirection.horizontalRight;\n } else if (max_dist_x === Math.abs(start_mid_x_dist)) {\n if (start_mid_x_dist > 0) estimatedDirection = FingerDirection.horizontalLeft;\n else estimatedDirection = FingerDirection.horizontalRight;\n } else {\n if (mid_end_x_dist > 0) estimatedDirection = FingerDirection.horizontalLeft;\n else estimatedDirection = FingerDirection.horizontalRight;\n }\n return estimatedDirection;\n}\n\nfunction estimateVerticalDirection(start_end_y_dist, start_mid_y_dist, mid_end_y_dist, max_dist_y) {\n let estimatedDirection;\n if (max_dist_y === Math.abs(start_end_y_dist)) {\n if (start_end_y_dist < 0) estimatedDirection = FingerDirection.verticalDown;\n else estimatedDirection = FingerDirection.verticalUp;\n } else if (max_dist_y === Math.abs(start_mid_y_dist)) {\n if (start_mid_y_dist < 0) estimatedDirection = FingerDirection.verticalDown;\n else estimatedDirection = FingerDirection.verticalUp;\n } else {\n if (mid_end_y_dist < 0) estimatedDirection = FingerDirection.verticalDown;\n else estimatedDirection = FingerDirection.verticalUp;\n }\n return estimatedDirection;\n}\n\nfunction estimateDiagonalDirection(start_end_y_dist, start_mid_y_dist, mid_end_y_dist, max_dist_y, start_end_x_dist, start_mid_x_dist, mid_end_x_dist, max_dist_x) {\n let estimatedDirection;\n const reqd_vertical_direction = estimateVerticalDirection(start_end_y_dist, start_mid_y_dist, mid_end_y_dist, max_dist_y);\n const reqd_horizontal_direction = estimateHorizontalDirection(start_end_x_dist, start_mid_x_dist, mid_end_x_dist, max_dist_x);\n if (reqd_vertical_direction === FingerDirection.verticalUp) {\n if (reqd_horizontal_direction === FingerDirection.horizontalLeft) estimatedDirection = FingerDirection.diagonalUpLeft;\n else estimatedDirection = FingerDirection.diagonalUpRight;\n } else {\n if (reqd_horizontal_direction === FingerDirection.horizontalLeft) estimatedDirection = FingerDirection.diagonalDownLeft;\n else estimatedDirection = FingerDirection.diagonalDownRight;\n }\n return estimatedDirection;\n}\n\nfunction calculateFingerDirection(startPoint, midPoint, endPoint, fingerSlopes) {\n const start_mid_x_dist = startPoint[0] - midPoint[0];\n const start_end_x_dist = startPoint[0] 
- endPoint[0];\n const mid_end_x_dist = midPoint[0] - endPoint[0];\n const start_mid_y_dist = startPoint[1] - midPoint[1];\n const start_end_y_dist = startPoint[1] - endPoint[1];\n const mid_end_y_dist = midPoint[1] - endPoint[1];\n const max_dist_x = Math.max(Math.abs(start_mid_x_dist), Math.abs(start_end_x_dist), Math.abs(mid_end_x_dist));\n const max_dist_y = Math.max(Math.abs(start_mid_y_dist), Math.abs(start_end_y_dist), Math.abs(mid_end_y_dist));\n let voteVertical = 0.0;\n let voteDiagonal = 0.0;\n let voteHorizontal = 0.0;\n const start_end_x_y_dist_ratio = max_dist_y / (max_dist_x + 0.00001);\n if (start_end_x_y_dist_ratio > 1.5) voteVertical += options.DISTANCE_VOTE_POWER;\n else if (start_end_x_y_dist_ratio > 0.66) voteDiagonal += options.DISTANCE_VOTE_POWER;\n else voteHorizontal += options.DISTANCE_VOTE_POWER;\n const start_mid_dist = Math.sqrt(start_mid_x_dist * start_mid_x_dist + start_mid_y_dist * start_mid_y_dist);\n const start_end_dist = Math.sqrt(start_end_x_dist * start_end_x_dist + start_end_y_dist * start_end_y_dist);\n const mid_end_dist = Math.sqrt(mid_end_x_dist * mid_end_x_dist + mid_end_y_dist * mid_end_y_dist);\n const max_dist = Math.max(start_mid_dist, start_end_dist, mid_end_dist);\n let calc_start_point_x = startPoint[0];\n let calc_start_point_y = startPoint[1];\n let calc_end_point_x = endPoint[0];\n let calc_end_point_y = endPoint[1];\n if (max_dist === start_mid_dist) {\n calc_end_point_x = endPoint[0];\n calc_end_point_y = endPoint[1];\n } else if (max_dist === mid_end_dist) {\n calc_start_point_x = midPoint[0];\n calc_start_point_y = midPoint[1];\n }\n const calcStartPoint = [calc_start_point_x, calc_start_point_y];\n const calcEndPoint = [calc_end_point_x, calc_end_point_y];\n const totalAngle = getSlopes(calcStartPoint, calcEndPoint);\n const votes = angleOrientationAt(totalAngle, options.TOTAL_ANGLE_VOTE_POWER);\n voteVertical += votes[0];\n voteDiagonal += votes[1];\n voteHorizontal += votes[2];\n for (const fingerSlope of fingerSlopes) {\n const fingerVotes = angleOrientationAt(fingerSlope, options.SINGLE_ANGLE_VOTE_POWER);\n voteVertical += fingerVotes[0];\n voteDiagonal += fingerVotes[1];\n voteHorizontal += fingerVotes[2];\n }\n // in case of tie, highest preference goes to Vertical,\n // followed by horizontal and then diagonal\n let estimatedDirection;\n if (voteVertical === Math.max(voteVertical, voteDiagonal, voteHorizontal)) {\n estimatedDirection = estimateVerticalDirection(start_end_y_dist, start_mid_y_dist, mid_end_y_dist, max_dist_y);\n } else if (voteHorizontal === Math.max(voteDiagonal, voteHorizontal)) {\n estimatedDirection = estimateHorizontalDirection(start_end_x_dist, start_mid_x_dist, mid_end_x_dist, max_dist_x);\n } else {\n estimatedDirection = estimateDiagonalDirection(start_end_y_dist, start_mid_y_dist, mid_end_y_dist, max_dist_y, start_end_x_dist, start_mid_x_dist, mid_end_x_dist, max_dist_x);\n }\n return estimatedDirection;\n}\n\nfunction estimate(landmarks) {\n // step 1: calculate slopes\n const slopesXY: Array = [];\n const slopesYZ: Array = [];\n const fingerCurls: Array = [];\n const fingerDirections: Array = [];\n if (!landmarks) return { curls: fingerCurls, directions: fingerDirections };\n\n // step 1: calculate slopes\n for (const finger of Finger.all) {\n const points = Finger.getPoints(finger);\n const slopeAtXY: Array = [];\n const slopeAtYZ: Array = [];\n for (const point of points) {\n const point1 = landmarks[point[0]];\n const point2 = landmarks[point[1]];\n // calculate single slope\n const slopes = 
getSlopes(point1, point2);\n const slopeXY = slopes[0];\n const slopeYZ = slopes[1];\n slopeAtXY.push(slopeXY);\n slopeAtYZ.push(slopeYZ);\n }\n slopesXY.push(slopeAtXY);\n slopesYZ.push(slopeAtYZ);\n }\n\n // step 2: calculate orientations\n for (const finger of Finger.all) {\n // start finger predictions from palm - except for thumb\n const pointIndexAt = (finger === Finger.thumb) ? 1 : 0;\n const fingerPointsAt = Finger.getPoints(finger);\n const startPoint = landmarks[fingerPointsAt[pointIndexAt][0]];\n const midPoint = landmarks[fingerPointsAt[pointIndexAt + 1][1]];\n const endPoint = landmarks[fingerPointsAt[3][1]];\n // check if finger is curled\n const fingerCurled = estimateFingerCurl(startPoint, midPoint, endPoint);\n const fingerPosition = calculateFingerDirection(startPoint, midPoint, endPoint, slopesXY[finger].slice(pointIndexAt));\n fingerCurls[finger] = fingerCurled;\n fingerDirections[finger] = fingerPosition;\n }\n return { curls: fingerCurls, directions: fingerDirections };\n}\n\nexport function analyze(keypoints) { // get estimations of curl / direction for each finger\n if (!keypoints || keypoints.length === 0) return null;\n const estimatorRes = estimate(keypoints);\n const landmarks = {};\n for (const fingerIdx of Finger.all) {\n landmarks[Finger.getName(fingerIdx)] = {\n curl: FingerCurl.getName(estimatorRes.curls[fingerIdx]),\n direction: FingerDirection.getName(estimatorRes.directions[fingerIdx]),\n };\n }\n return landmarks;\n}\n\nexport function match(keypoints) { // compare gesture description to each known gesture\n const poses: Array<{ name: string, confidence: number }> = [];\n if (!keypoints || keypoints.length === 0) return poses;\n const estimatorRes = estimate(keypoints);\n for (const gesture of Gestures) {\n const confidence = gesture.matchAgainst(estimatorRes.curls, estimatorRes.directions);\n if (confidence >= minConfidence) poses.push({ name: gesture.name, confidence });\n }\n return poses;\n}\n", "/**\n * HandPose model implementation\n *\n * Based on: [**MediaPipe HandPose**](https://drive.google.com/file/d/1sv4sSb9BSNVZhLzxXJ0jBv9DqD-4jnAz/view)\n */\n\nimport { log, join } from '../util/util';\nimport * as tf from '../../dist/tfjs.esm.js';\nimport * as handdetector from './handposedetector';\nimport * as handpipeline from './handposepipeline';\nimport * as fingerPose from './fingerpose';\nimport type { HandResult, Box, Point } from '../result';\nimport type { Tensor, GraphModel } from '../tfjs/types';\nimport type { Config } from '../config';\nimport { env } from '../util/env';\n\nconst meshAnnotations = {\n thumb: [1, 2, 3, 4],\n index: [5, 6, 7, 8],\n middle: [9, 10, 11, 12],\n ring: [13, 14, 15, 16],\n pinky: [17, 18, 19, 20],\n palm: [0],\n};\n\nlet handDetectorModel: GraphModel | null;\nlet handPoseModel: GraphModel | null;\nlet handPipeline: handpipeline.HandPipeline;\n\nexport async function predict(input: Tensor, config: Config): Promise {\n const predictions = await handPipeline.estimateHands(input, config);\n if (!predictions) return [];\n const hands: Array = [];\n for (let i = 0; i < predictions.length; i++) {\n const annotations = {};\n if (predictions[i].landmarks) {\n for (const key of Object.keys(meshAnnotations)) {\n annotations[key] = meshAnnotations[key].map((index) => predictions[i].landmarks[index]);\n }\n }\n\n const keypoints = predictions[i].landmarks as unknown as Array;\n\n let box: Box = [Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER, 0, 0]; // maximums so conditionals work\n let boxRaw: Box = [0, 0, 0, 0];\n if 
(keypoints && keypoints.length > 0) { // if we have landmarks, calculate box based on landmarks\n for (const pt of keypoints) {\n if (pt[0] < box[0]) box[0] = pt[0];\n if (pt[1] < box[1]) box[1] = pt[1];\n if (pt[0] > box[2]) box[2] = pt[0];\n if (pt[1] > box[3]) box[3] = pt[1];\n }\n box[2] -= box[0];\n box[3] -= box[1];\n boxRaw = [box[0] / (input.shape[2] || 0), box[1] / (input.shape[1] || 0), box[2] / (input.shape[2] || 0), box[3] / (input.shape[1] || 0)];\n } else { // otherwise use box from prediction\n box = predictions[i].box ? [\n Math.trunc(Math.max(0, predictions[i].box.topLeft[0])),\n Math.trunc(Math.max(0, predictions[i].box.topLeft[1])),\n Math.trunc(Math.min((input.shape[2] || 0), predictions[i].box.bottomRight[0]) - Math.max(0, predictions[i].box.topLeft[0])),\n Math.trunc(Math.min((input.shape[1] || 0), predictions[i].box.bottomRight[1]) - Math.max(0, predictions[i].box.topLeft[1])),\n ] : [0, 0, 0, 0];\n boxRaw = [\n (predictions[i].box.topLeft[0]) / (input.shape[2] || 0),\n (predictions[i].box.topLeft[1]) / (input.shape[1] || 0),\n (predictions[i].box.bottomRight[0] - predictions[i].box.topLeft[0]) / (input.shape[2] || 0),\n (predictions[i].box.bottomRight[1] - predictions[i].box.topLeft[1]) / (input.shape[1] || 0),\n ];\n }\n const landmarks = fingerPose.analyze(keypoints);\n hands.push({\n id: i,\n score: Math.round(100 * predictions[i].confidence) / 100,\n boxScore: Math.round(100 * predictions[i].boxConfidence) / 100,\n fingerScore: Math.round(100 * predictions[i].fingerConfidence) / 100,\n label: 'hand',\n box,\n boxRaw,\n keypoints,\n annotations: annotations as HandResult['annotations'],\n landmarks: landmarks as HandResult['landmarks'],\n });\n }\n return hands;\n}\n\nexport async function load(config: Config): Promise<[GraphModel | null, GraphModel | null]> {\n if (env.initial) {\n handDetectorModel = null;\n handPoseModel = null;\n }\n if (!handDetectorModel || !handPoseModel) {\n [handDetectorModel, handPoseModel] = await Promise.all([\n config.hand.enabled ? tf.loadGraphModel(join(config.modelBasePath, config.hand.detector?.modelPath || ''), { fromTFHub: (config.hand.detector?.modelPath || '').includes('tfhub.dev') }) as unknown as GraphModel : null,\n config.hand.landmarks ? 
tf.loadGraphModel(join(config.modelBasePath, config.hand.skeleton?.modelPath || ''), { fromTFHub: (config.hand.skeleton?.modelPath || '').includes('tfhub.dev') }) as unknown as GraphModel : null,\n ]);\n if (config.hand.enabled) {\n if (!handDetectorModel || !handDetectorModel['modelUrl']) log('load model failed:', config.hand.detector?.modelPath || '');\n else if (config.debug) log('load model:', handDetectorModel['modelUrl']);\n if (!handPoseModel || !handPoseModel['modelUrl']) log('load model failed:', config.hand.skeleton?.modelPath || '');\n else if (config.debug) log('load model:', handPoseModel['modelUrl']);\n }\n } else {\n if (config.debug) log('cached model:', handDetectorModel['modelUrl']);\n if (config.debug) log('cached model:', handPoseModel['modelUrl']);\n }\n const handDetector = new handdetector.HandDetector(handDetectorModel);\n handPipeline = new handpipeline.HandPipeline(handDetector, handPoseModel);\n return [handDetectorModel, handPoseModel];\n}\n", "import type { Point, Box } from '../result';\n\nexport function calc(keypoints: Array, outputSize: [number, number] = [1, 1]) {\n const coords = [keypoints.map((pt) => pt[0]), keypoints.map((pt) => pt[1])]; // all x/y coords\n const min = [Math.min(...coords[0]), Math.min(...coords[1])];\n const max = [Math.max(...coords[0]), Math.max(...coords[1])];\n const box: Box = [min[0], min[1], max[0] - min[0], max[1] - min[1]];\n const boxRaw: Box = [box[0] / outputSize[0], box[1] / outputSize[1], box[2] / outputSize[0], box[3] / outputSize[1]];\n return { box, boxRaw };\n}\n\nexport function square(keypoints: Array, outputSize: [number, number] = [1, 1]) {\n const coords = [keypoints.map((pt) => pt[0]), keypoints.map((pt) => pt[1])]; // all x/y coords\n const min = [Math.min(...coords[0]), Math.min(...coords[1])];\n const max = [Math.max(...coords[0]), Math.max(...coords[1])];\n const center = [(min[0] + max[0]) / 2, (min[1] + max[1]) / 2]; // find center x and y coord of all fingers\n const dist = Math.max(center[0] - min[0], center[1] - min[1], -center[0] + max[0], -center[1] + max[1]); // largest distance from center in any direction\n const box: Box = [Math.trunc(center[0] - dist), Math.trunc(center[1] - dist), Math.trunc(2 * dist), Math.trunc(2 * dist)];\n const boxRaw: Box = [box[0] / outputSize[0], box[1] / outputSize[1], box[2] / outputSize[0], box[3] / outputSize[1]];\n return { box, boxRaw };\n}\n\nexport function scale(box: Box, scaleFact: number) {\n const dist = [box[2] * scaleFact, box[3] * scaleFact];\n const newBox: Box = [\n box[0] - (dist[0] - box[2]) / 2,\n box[1] - (dist[1] - box[3]) / 2,\n dist[0],\n dist[1],\n ];\n return newBox;\n}\n\nexport function crop(box: Box) { // [y1, x1, y2, x2] clamped to 0..1\n const yxBox: Box = [Math.max(0, box[1]), Math.max(0, box[0]), Math.min(1, box[3] + box[1]), Math.min(1, box[2] + box[0])];\n return yxBox;\n}\n", "/**\n * HandTrack model implementation\n *\n * Based on:\n * - Hand Detection & Skeleton: [**MediaPipe HandPose**](https://drive.google.com/file/d/1sv4sSb9BSNVZhLzxXJ0jBv9DqD-4jnAz/view)\n * - Hand Tracking: [**HandTracking**](https://github.com/victordibia/handtracking)\n */\n\nimport { log, join, now } from '../util/util';\nimport * as box from '../util/box';\nimport * as tf from '../../dist/tfjs.esm.js';\nimport type { HandResult, Box, Point } from '../result';\nimport type { GraphModel, Tensor } from '../tfjs/types';\nimport type { Config } from '../config';\nimport { env } from '../util/env';\nimport * as fingerPose from './fingerpose';\nimport { fakeOps } 
from '../tfjs/backend';\n\nconst models: [GraphModel | null, GraphModel | null] = [null, null];\nconst modelOutputNodes = ['StatefulPartitionedCall/Postprocessor/Slice', 'StatefulPartitionedCall/Postprocessor/ExpandDims_1'];\n\nconst inputSize = [[0, 0], [0, 0]];\n\nconst classes = ['hand', 'fist', 'pinch', 'point', 'face', 'tip', 'pinchtip'];\nconst faceIndex = 4;\n\nconst boxExpandFact = 1.7;\nconst maxDetectorResolution = 512;\nconst detectorExpandFact = 1.4;\n\nlet skipped = Number.MAX_SAFE_INTEGER;\nlet lastTime = 0;\nlet outputSize: [number, number] = [0, 0];\n\ntype HandDetectResult = {\n id: number,\n score: number,\n box: Box,\n boxRaw: Box,\n boxCrop: Box,\n label: string,\n}\n\nconst cache: {\n boxes: Array,\n hands: Array;\n} = {\n boxes: [],\n hands: [],\n};\n\nconst fingerMap = {\n thumb: [1, 2, 3, 4],\n index: [5, 6, 7, 8],\n middle: [9, 10, 11, 12],\n ring: [13, 14, 15, 16],\n pinky: [17, 18, 19, 20],\n palm: [0],\n};\n\nexport async function loadDetect(config: Config): Promise {\n // HandTrack Model: Original: TFJS Port: \n if (env.initial) models[0] = null;\n if (!models[0]) {\n // handtrack model has some kernel ops defined in model but those are never referenced and non-existent in tfjs\n // ideally need to prune the model itself\n fakeOps(['tensorlistreserve', 'enter', 'tensorlistfromtensor', 'merge', 'loopcond', 'switch', 'exit', 'tensorliststack', 'nextiteration', 'tensorlistsetitem', 'tensorlistgetitem', 'reciprocal', 'shape', 'split', 'where'], config);\n models[0] = await tf.loadGraphModel(join(config.modelBasePath, config.hand.detector?.modelPath || '')) as unknown as GraphModel;\n const inputs = Object.values(models[0].modelSignature['inputs']);\n inputSize[0][0] = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[1].size) : 0;\n inputSize[0][1] = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : 0;\n if (!models[0] || !models[0]['modelUrl']) log('load model failed:', config.hand.detector?.modelPath);\n else if (config.debug) log('load model:', models[0]['modelUrl']);\n } else if (config.debug) log('cached model:', models[0]['modelUrl']);\n return models[0];\n}\n\nexport async function loadSkeleton(config: Config): Promise {\n if (env.initial) models[1] = null;\n if (!models[1]) {\n models[1] = await tf.loadGraphModel(join(config.modelBasePath, config.hand.skeleton?.modelPath || '')) as unknown as GraphModel;\n const inputs = Object.values(models[1].modelSignature['inputs']);\n inputSize[1][0] = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[1].size) : 0;\n inputSize[1][1] = Array.isArray(inputs) ? 
parseInt(inputs[0].tensorShape.dim[2].size) : 0;\n if (!models[1] || !models[1]['modelUrl']) log('load model failed:', config.hand.skeleton?.modelPath);\n else if (config.debug) log('load model:', models[1]['modelUrl']);\n } else if (config.debug) log('cached model:', models[1]['modelUrl']);\n return models[1];\n}\n\nexport async function load(config: Config): Promise<[GraphModel | null, GraphModel | null]> {\n if (!models[0]) await loadDetect(config);\n if (!models[1]) await loadSkeleton(config);\n return models;\n}\n\nasync function detectHands(input: Tensor, config: Config): Promise {\n const hands: HandDetectResult[] = [];\n if (!input || !models[0]) return hands;\n const t: Record = {};\n const ratio = (input.shape[2] || 1) / (input.shape[1] || 1);\n const height = Math.min(Math.round((input.shape[1] || 0) / 8) * 8, maxDetectorResolution); // use dynamic input size but cap at 512\n const width = Math.round(height * ratio / 8) * 8;\n t.resize = tf.image.resizeBilinear(input, [height, width]); // todo: resize with padding\n t.cast = tf.cast(t.resize, 'int32');\n [t.rawScores, t.rawBoxes] = await models[0].executeAsync(t.cast, modelOutputNodes) as Tensor[];\n t.boxes = tf.squeeze(t.rawBoxes, [0, 2]);\n t.scores = tf.squeeze(t.rawScores, [0]);\n const classScores: Array = tf.unstack(t.scores, 1); // unstack scores based on classes\n tf.dispose(classScores[faceIndex]);\n classScores.splice(faceIndex, 1); // remove faces\n t.filtered = tf.stack(classScores, 1); // restack\n tf.dispose(classScores);\n t.max = tf.max(t.filtered, 1); // max overall score\n t.argmax = tf.argMax(t.filtered, 1); // class index of max overall score\n let id = 0;\n t.nms = await tf.image.nonMaxSuppressionAsync(t.boxes, t.max, config.hand.maxDetected, config.hand.iouThreshold, config.hand.minConfidence);\n const nms = await t.nms.data();\n const scores = await t.max.data();\n const classNum = await t.argmax.data();\n for (const nmsIndex of Array.from(nms)) { // generates results for each class\n const boxSlice = tf.slice(t.boxes, nmsIndex, 1);\n const boxYX = await boxSlice.data();\n tf.dispose(boxSlice);\n // const boxSquareSize = Math.max(boxData[3] - boxData[1], boxData[2] - boxData[0]);\n const boxData: Box = [boxYX[1], boxYX[0], boxYX[3] - boxYX[1], boxYX[2] - boxYX[0]]; // yx box reshaped to standard box\n const boxRaw: Box = box.scale(boxData, detectorExpandFact);\n const boxCrop: Box = box.crop(boxRaw); // crop box is based on raw box\n const boxFull: Box = [Math.trunc(boxData[0] * outputSize[0]), Math.trunc(boxData[1] * outputSize[1]), Math.trunc(boxData[2] * outputSize[0]), Math.trunc(boxData[3] * outputSize[1])];\n const score = scores[nmsIndex];\n const label = classes[classNum[nmsIndex]];\n const hand: HandDetectResult = { id: id++, score, box: boxFull, boxRaw, boxCrop, label };\n hands.push(hand);\n }\n Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));\n hands.sort((a, b) => b.score - a.score);\n if (hands.length > (config.hand.maxDetected || 1)) hands.length = (config.hand.maxDetected || 1);\n return hands;\n}\n\nasync function detectFingers(input: Tensor, h: HandDetectResult, config: Config): Promise {\n const hand: HandResult = { // initial values inherited from hand detect\n id: h.id,\n score: Math.round(100 * h.score) / 100,\n boxScore: Math.round(100 * h.score) / 100,\n fingerScore: 0,\n box: h.box,\n boxRaw: h.boxRaw,\n label: h.label,\n keypoints: [],\n landmarks: {} as HandResult['landmarks'],\n annotations: {} as HandResult['annotations'],\n };\n if (input && models[1] && 
config.hand.landmarks && h.score > (config.hand.minConfidence || 0)) {\n const t: Record = {};\n t.crop = tf.image.cropAndResize(input, [h.boxCrop], [0], [inputSize[1][0], inputSize[1][1]], 'bilinear');\n t.cast = tf.cast(t.crop, 'float32');\n t.div = tf.div(t.cast, 255);\n [t.score, t.keypoints] = models[1].execute(t.div, ['Identity_1', 'Identity']) as Tensor[];\n const rawScore = (await t.score.data())[0];\n const score = (100 - Math.trunc(100 / (1 + Math.exp(rawScore)))) / 100; // reverse sigmoid value\n if (score >= (config.hand.minConfidence || 0)) {\n hand.fingerScore = score;\n t.reshaped = tf.reshape(t.keypoints, [-1, 3]);\n const coordsData: Point[] = await t.reshaped.array() as Point[];\n const coordsRaw: Point[] = coordsData.map((kpt) => [kpt[0] / inputSize[1][1], kpt[1] / inputSize[1][0], (kpt[2] || 0)]);\n const coordsNorm: Point[] = coordsRaw.map((kpt) => [kpt[0] * h.boxRaw[2], kpt[1] * h.boxRaw[3], (kpt[2] || 0)]);\n hand.keypoints = (coordsNorm).map((kpt) => [\n outputSize[0] * (kpt[0] + h.boxRaw[0]),\n outputSize[1] * (kpt[1] + h.boxRaw[1]),\n (kpt[2] || 0),\n ]);\n // hand.box = box.scale(h.box, 1 / detectorExpandFact); // scale box down for visual appeal\n // hand.boxRaw = box.scale(h.boxRaw, 1 / detectorExpandFact); // scale box down for visual appeal\n hand.landmarks = fingerPose.analyze(hand.keypoints) as HandResult['landmarks']; // calculate finger landmarks\n for (const key of Object.keys(fingerMap)) { // map keypoints to per-finger annotations\n hand.annotations[key] = fingerMap[key].map((index) => (hand.landmarks && hand.keypoints[index] ? hand.keypoints[index] : null));\n }\n }\n Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));\n }\n return hand;\n}\n\nexport async function predict(input: Tensor, config: Config): Promise {\n if (!models[0] || !models[1] || !models[0]?.inputs[0].shape || !models[1]?.inputs[0].shape) return []; // something is wrong with the model\n outputSize = [input.shape[2] || 0, input.shape[1] || 0];\n skipped++; // increment skip frames\n const skipTime = (config.hand.skipTime || 0) > (now() - lastTime);\n const skipFrame = skipped < (config.hand.skipFrames || 0);\n if (config.skipAllowed && skipTime && skipFrame) {\n return cache.hands; // return cached results without running anything\n }\n return new Promise(async (resolve) => {\n const skipTimeExtended = 3 * (config.hand.skipTime || 0) > (now() - lastTime);\n const skipFrameExtended = skipped < 3 * (config.hand.skipFrames || 0);\n if (config.skipAllowed && cache.hands.length === config.hand.maxDetected) { // we have all detected hands so we're definitely skipping\n cache.hands = await Promise.all(cache.boxes.map((handBox) => detectFingers(input, handBox, config)));\n } else if (config.skipAllowed && skipTimeExtended && skipFrameExtended && cache.hands.length > 0) { // we have some cached results: maybe not enough but anyhow continue for bit longer\n cache.hands = await Promise.all(cache.boxes.map((handBox) => detectFingers(input, handBox, config)));\n } else { // finally rerun detector\n cache.boxes = await detectHands(input, config);\n lastTime = now();\n cache.hands = await Promise.all(cache.boxes.map((handBox) => detectFingers(input, handBox, config)));\n skipped = 0;\n }\n\n const oldCache = [...cache.boxes];\n cache.boxes.length = 0; // reset cache\n if (config.cacheSensitivity > 0) {\n for (let i = 0; i < cache.hands.length; i++) {\n const boxKpt = box.square(cache.hands[i].keypoints, outputSize);\n if (boxKpt.box[2] / (input.shape[2] || 1) > 0.05 && boxKpt.box[3] / 
(input.shape[1] || 1) > 0.05 && cache.hands[i].fingerScore && cache.hands[i].fingerScore > (config.hand.minConfidence || 0)) {\n          const boxScale = box.scale(boxKpt.box, boxExpandFact);\n          const boxScaleRaw = box.scale(boxKpt.boxRaw, boxExpandFact);\n          const boxCrop = box.crop(boxScaleRaw);\n          cache.boxes.push({ ...oldCache[i], box: boxScale, boxRaw: boxScaleRaw, boxCrop });\n        }\n      }\n    }\n    for (let i = 0; i < cache.hands.length; i++) { // replace detected boxes with calculated boxes in final output\n      const bbox = box.calc(cache.hands[i].keypoints, outputSize);\n      cache.hands[i].box = bbox.box;\n      cache.hands[i].boxRaw = bbox.boxRaw;\n    }\n    resolve(cache.hands);\n  });\n}\n",
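The branching in predict above implements a two-tier temporal cache: within the configured skipTime/skipFrames window it returns cached hands outright, with a full working set it only reruns the finger model, and with a partial working set it keeps tracking for up to three times the window before forcing a fresh detector pass. A condensed, framework-free sketch of the rerun decision; the shouldRerunDetector helper is illustrative, not part of the module:

function shouldRerunDetector(
  state: { lastTime: number; skipped: number; cachedHands: number },
  cfg: { skipAllowed: boolean; skipTime: number; skipFrames: number; maxDetected: number },
  nowMs: number,
): boolean {
  if (!cfg.skipAllowed) return true; // static input: every frame is independent
  if (state.cachedHands === cfg.maxDetected) return false; // full working set: only re-run the finger model
  const withinExtendedWindow = (3 * cfg.skipTime > nowMs - state.lastTime) && (state.skipped < 3 * cfg.skipFrames);
  if (state.cachedHands > 0 && withinExtendedWindow) return false; // partial set: keep tracking a while longer
  return true; // otherwise re-run detection and rebuild the cached boxes
}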
// ===== src/body/movenetfix.ts =====
import type { BodyKeypoint, BodyResult } from '../result';
import * as box from '../util/box';
import * as coords from './movenetcoords';
import * as tf from '../../dist/tfjs.esm.js';
import type { Tensor } from '../tfjs/types';

const maxJitter = 0.005; // default allowed jitter is within 0.5%

const cache: {
  keypoints: Array<BodyKeypoint>,
  padding: [number, number][];
} = {
  keypoints: [],
  padding: [[0, 0], [0, 0], [0, 0], [0, 0]],
};

export function bodyParts(body: BodyResult) { // model sometimes mixes up left vs right keypoints so we fix them
  for (const pair of coords.horizontal) { // fix body parts left vs right
    const left = body.keypoints.findIndex((kp) => kp.part === pair[0]);
    const right = body.keypoints.findIndex((kp) => kp.part === pair[1]);
    if (body.keypoints[left] && body.keypoints[right]) {
      if (body.keypoints[left].position[0] < body.keypoints[right].position[0]) {
        const tmp = body.keypoints[left];
        body.keypoints[left] = body.keypoints[right];
        body.keypoints[right] = tmp;
      }
    }
  }
  for (const pair of coords.vertical) { // remove body parts with improbable vertical position
    const lower = body.keypoints.findIndex((kp) => (kp && kp.part === pair[0]));
    const higher = body.keypoints.findIndex((kp) => (kp && kp.part === pair[1]));
    if (body.keypoints[lower] && body.keypoints[higher]) {
      if (body.keypoints[lower].position[1] < body.keypoints[higher].position[1]) {
        body.keypoints.splice(lower, 1);
      }
    }
  }
  for (const [pair, compare] of coords.relative) { // rearrange body parts according to their relative position
    const left = body.keypoints.findIndex((kp) => (kp && kp.part === pair[0]));
    const right = body.keypoints.findIndex((kp) => (kp && kp.part === pair[1]));
    const leftTo = body.keypoints.findIndex((kp) => (kp && kp.part === compare[0]));
    const rightTo = body.keypoints.findIndex((kp) => (kp && kp.part === compare[1]));
    if (!body.keypoints[leftTo] || !body.keypoints[rightTo]) continue; // only if we have both compare points
    const distanceLeft = body.keypoints[left] ? [
      Math.abs(body.keypoints[leftTo].position[0] - body.keypoints[left].position[0]),
      Math.abs(body.keypoints[rightTo].position[0] - body.keypoints[left].position[0]),
    ] : [0, 0];
    const distanceRight = body.keypoints[right] ? [
      Math.abs(body.keypoints[rightTo].position[0] - body.keypoints[right].position[0]),
      Math.abs(body.keypoints[leftTo].position[0] - body.keypoints[right].position[0]),
    ] : [0, 0];
    if (distanceLeft[0] > distanceLeft[1] || distanceRight[0] > distanceRight[1]) { // should flip keypoints
      const tmp = body.keypoints[left];
      body.keypoints[left] = body.keypoints[right];
      body.keypoints[right] = tmp;
    }
  }
}

export function jitter(keypoints: Array<BodyKeypoint>): Array<BodyKeypoint> {
  for (let i = 0; i < keypoints.length; i++) {
    if (keypoints[i] && cache.keypoints[i]) {
      const diff = [Math.abs(keypoints[i].positionRaw[0] - cache.keypoints[i].positionRaw[0]), Math.abs(keypoints[i].positionRaw[1] - cache.keypoints[i].positionRaw[1])];
      if (diff[0] < maxJitter && diff[1] < maxJitter) {
        keypoints[i] = cache.keypoints[i]; // below jitter threshold so replace keypoint with cached value
      } else {
        cache.keypoints[i] = keypoints[i]; // above jitter threshold so update cache
      }
    } else {
      cache.keypoints[i] = keypoints[i]; // cache for keypoint doesn't exist so create it here
    }
  }
  return keypoints;
}
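// Worked example of the jitter gate above: positionRaw is normalized to 0..1, so maxJitter = 0.005
// means movement below 0.5% of the frame in BOTH axes is treated as sensor noise. On a 1920x1080
// input, a keypoint moving less than ~9.6px horizontally and ~5.4px vertically snaps back to its
// cached position instead of visibly trembling between frames.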
export function padInput(input: Tensor, inputSize: number): Tensor {
  const t: Record<string, Tensor> = {};
  if (!input.shape || !input.shape[1] || !input.shape[2]) return input;
  cache.padding = [
    [0, 0], // don't touch batch
    [input.shape[2] > input.shape[1] ? Math.trunc((input.shape[2] - input.shape[1]) / 2) : 0, input.shape[2] > input.shape[1] ? Math.trunc((input.shape[2] - input.shape[1]) / 2) : 0], // height before&after
    [input.shape[1] > input.shape[2] ? Math.trunc((input.shape[1] - input.shape[2]) / 2) : 0, input.shape[1] > input.shape[2] ? Math.trunc((input.shape[1] - input.shape[2]) / 2) : 0], // width before&after
    [0, 0], // don't touch rgb channels
  ];
  t.pad = tf.pad(input, cache.padding);
  t.resize = tf.image.resizeBilinear(t.pad, [inputSize, inputSize]);
  const final = tf.cast(t.resize, 'int32');
  Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));
  return final;
}

export function rescaleBody(body: BodyResult, outputSize: [number, number]): BodyResult {
  body.keypoints = body.keypoints.filter((kpt) => kpt && kpt.position); // filter invalid keypoints
  for (const kpt of body.keypoints) {
    kpt.position = [
      kpt.position[0] * (outputSize[0] + cache.padding[2][0] + cache.padding[2][1]) / outputSize[0] - cache.padding[2][0],
      kpt.position[1] * (outputSize[1] + cache.padding[1][0] + cache.padding[1][1]) / outputSize[1] - cache.padding[1][0],
    ];
    kpt.positionRaw = [
      kpt.position[0] / outputSize[0], kpt.position[1] / outputSize[1],
    ];
  }
  const rescaledBoxes = box.calc(body.keypoints.map((pt) => pt.position), outputSize);
  body.box = rescaledBoxes.box;
  body.boxRaw = rescaledBoxes.boxRaw;
  return body;
}
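// Illustrative sketch only (hypothetical helper, not part of the module): padInput() square-pads
// the shorter image axis so the aspect ratio survives the model's square resize, and rescaleBody()
// inverts that padding afterwards. The per-side padding amount reduces to:
function squarePadding(height: number, width: number): { padY: number, padX: number } {
  const padY = width > height ? Math.trunc((width - height) / 2) : 0; // pad top & bottom
  const padX = height > width ? Math.trunc((height - width) / 2) : 0; // pad left & right
  return { padY, padX }; // e.g. a 1920x1080 input gets padY=420 on each side, giving a 1920x1920 square
}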
// ===== src/body/movenet.ts =====
/**
 * MoveNet model implementation
 *
 * Based on: [**MoveNet**](https://blog.tensorflow.org/2021/05/next-generation-pose-detection-with-movenet-and-tensorflowjs.html)
 */

import { log, join, now } from '../util/util';
import * as box from '../util/box';
import * as tf from '../../dist/tfjs.esm.js';
import * as coords from './movenetcoords';
import * as fix from './movenetfix';
import type { BodyKeypoint, BodyResult, Box, Point } from '../result';
import type { GraphModel, Tensor } from '../tfjs/types';
import type { Config } from '../config';
import { fakeOps } from '../tfjs/backend';
import { env } from '../util/env';

let model: GraphModel | null;
let inputSize = 0;
let skipped = Number.MAX_SAFE_INTEGER;
// const boxExpandFact = 1.5; // increase to 150%

const cache: {
  boxes: Array<Box>, // unused
  bodies: Array<BodyResult>;
  last: number,
} = {
  boxes: [],
  bodies: [],
  last: 0,
};

export async function load(config: Config): Promise<GraphModel> {
  if (env.initial) model = null;
  if (!model) {
    fakeOps(['size'], config);
    model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath || '')) as unknown as GraphModel;
    if (!model || !model['modelUrl']) log('load model failed:', config.body.modelPath);
    else if (config.debug) log('load model:', model['modelUrl']);
  } else if (config.debug) log('cached model:', model['modelUrl']);
  inputSize = model.inputs[0].shape ? model.inputs[0].shape[2] : 0;
  if (inputSize === -1) inputSize = 256; // a model with dynamic input size reports -1 so pick a reasonable default
  return model;
}

async function parseSinglePose(res, config, image, inputBox) {
  const kpt = res[0][0];
  const keypoints: Array<BodyKeypoint> = [];
  let score = 0;
  for (let id = 0; id < kpt.length; id++) {
    score = kpt[id][2];
    if (score > config.body.minConfidence) {
      const positionRaw: Point = [
        (inputBox[3] - inputBox[1]) * kpt[id][1] + inputBox[1],
        (inputBox[2] - inputBox[0]) * kpt[id][0] + inputBox[0],
      ];
      keypoints.push({
        score: Math.round(100 * score) / 100,
        part: coords.kpt[id],
        positionRaw,
        position: [ // normalized to input image size
          Math.round((image.shape[2] || 0) * positionRaw[0]),
          Math.round((image.shape[1] || 0) * positionRaw[1]),
        ],
      });
    }
  }
  score = keypoints.reduce((prev, curr) => (curr.score > prev ? curr.score : prev), 0);
  const bodies: Array<BodyResult> = [];
  const newBox = box.calc(keypoints.map((pt) => pt.position), [image.shape[2], image.shape[1]]);
  const annotations: Record<string, Point[][]> = {};
  for (const [name, indexes] of Object.entries(coords.connected)) {
    const pt: Array<Point[]> = [];
    for (let i = 0; i < indexes.length - 1; i++) {
      const pt0 = keypoints.find((kp) => kp.part === indexes[i]);
      const pt1 = keypoints.find((kp) => kp.part === indexes[i + 1]);
      if (pt0 && pt1 && pt0.score > (config.body.minConfidence || 0) && pt1.score > (config.body.minConfidence || 0)) pt.push([pt0.position, pt1.position]);
    }
    annotations[name] = pt;
  }
  const body: BodyResult = { id: 0, score, box: newBox.box, boxRaw: newBox.boxRaw, keypoints, annotations };
  fix.bodyParts(body);
  bodies.push(body);
  return bodies;
}

async function parseMultiPose(res, config, image, inputBox) {
  const bodies: Array<BodyResult> = [];
  for (let id = 0; id < res[0].length; id++) {
    const kpt = res[0][id];
    const totalScore = Math.round(100 * kpt[51 + 4]) / 100;
    if (totalScore > config.body.minConfidence) {
      const keypoints: Array<BodyKeypoint> = [];
      for (let i = 0; i < 17; i++) {
        const score = kpt[3 * i + 2];
        if (score > config.body.minConfidence) {
          const positionRaw: Point = [
            (inputBox[3] - inputBox[1]) * kpt[3 * i + 1] + inputBox[1],
            (inputBox[2] - inputBox[0]) * kpt[3 * i + 0] + inputBox[0],
          ];
          keypoints.push({
            part: coords.kpt[i],
            score: Math.round(100 * score) / 100,
            positionRaw,
            position: [Math.round((image.shape[2] || 0) * positionRaw[0]), Math.round((image.shape[1] || 0) * positionRaw[1])],
          });
        }
      }
      const newBox = box.calc(keypoints.map((pt) => pt.position), [image.shape[2], image.shape[1]]);
      // movenet-multipose has built-in box details
      // const boxRaw: Box = [kpt[51 + 1], kpt[51 + 0], kpt[51 + 3] - kpt[51 + 1], kpt[51 + 2] - kpt[51 + 0]];
      // const box: Box = [Math.trunc(boxRaw[0] * (image.shape[2] || 0)), Math.trunc(boxRaw[1] * (image.shape[1] || 0)), Math.trunc(boxRaw[2] * (image.shape[2] || 0)), Math.trunc(boxRaw[3] * (image.shape[1] || 0))];
      const annotations: Record<string, Point[][]> = {};
      for (const [name, indexes] of Object.entries(coords.connected)) {
        const pt: Array<Point[]> = [];
        for (let i = 0; i < indexes.length - 1; i++) {
          const pt0 = keypoints.find((kp) => kp.part === indexes[i]);
          const pt1 = keypoints.find((kp) => kp.part === indexes[i + 1]);
          if (pt0 && pt1 && pt0.score > (config.body.minConfidence || 0) && pt1.score > (config.body.minConfidence || 0)) pt.push([pt0.position, pt1.position]);
        }
        annotations[name] = pt;
      }
      const body: BodyResult = { id, score: totalScore, box: newBox.box, boxRaw: newBox.boxRaw, keypoints: [...keypoints], annotations };
      fix.bodyParts(body);
      bodies.push(body);
    }
  }
  bodies.sort((a, b) => b.score - a.score);
  if (bodies.length > config.body.maxDetected) bodies.length = config.body.maxDetected;
  return bodies;
}
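// Descriptive note on the two output layouts parsed above (per the official MoveNet model cards):
// single-pose variants return [1, 1, 17, 3] (17 keypoints as y, x, score), while multi-pose returns
// [1, 6, 56] per candidate: 17 x 3 keypoint values (indexes 0..50), then box ymin, xmin, ymax, xmax
// (51..54), then the overall instance score (55) -- which is why the code reads kpt[51 + 4] for
// totalScore and kpt[3 * i + ...] for individual keypoints.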
export async function predict(input: Tensor, config: Config): Promise<BodyResult[]> {
  if (!model || !model?.inputs[0].shape) return []; // something is wrong with the model
  if (!config.skipAllowed) cache.boxes.length = 0; // allowed to use cache or not
  skipped++; // increment skip frames
  const skipTime = (config.body.skipTime || 0) > (now() - cache.last);
  const skipFrame = skipped < (config.body.skipFrames || 0);
  if (config.skipAllowed && skipTime && skipFrame) {
    return cache.bodies; // return cached results without running anything
  }
  return new Promise(async (resolve) => {
    const t: Record<string, Tensor> = {};
    skipped = 0;
    // run detection on squared input and cached boxes
    /*
    cache.bodies = []; // reset bodies result
    if (cache.boxes.length >= (config.body.maxDetected || 0)) { // if we have enough cached boxes run detection using cache
      for (let i = 0; i < cache.boxes.length; i++) { // run detection based on cached boxes
        t.crop = tf.image.cropAndResize(input, [cache.boxes[i]], [0], [inputSize, inputSize], 'bilinear');
        t.cast = tf.cast(t.crop, 'int32');
        // t.input = prepareImage(input);
        t.res = model?.execute(t.cast) as Tensor;
        const res = await t.res.array();
        const newBodies = (t.res.shape[2] === 17) ? await parseSinglePose(res, config, input, cache.boxes[i]) : await parseMultiPose(res, config, input, cache.boxes[i]);
        cache.bodies = cache.bodies.concat(newBodies);
        Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));
      }
    }
    if (cache.bodies.length !== config.body.maxDetected) { // did not find enough bodies based on cached boxes so run detection on full frame
      t.input = prepareImage(input);
      t.res = model?.execute(t.input) as Tensor;
      const res = await t.res.array();
      cache.bodies = (t.res.shape[2] === 17) ? await parseSinglePose(res, config, input, [0, 0, 1, 1]) : await parseMultiPose(res, config, input, [0, 0, 1, 1]);
      for (const body of cache.bodies) rescaleBody(body, [input.shape[2] || 1, input.shape[1] || 1]);
      Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));
    }
    cache.boxes.length = 0; // reset cache
    for (let i = 0; i < cache.bodies.length; i++) {
      if (cache.bodies[i].keypoints.length > (coords.kpt.length / 2)) { // only update cache if we detected at least half keypoints
        const scaledBox = box.scale(cache.bodies[i].boxRaw, boxExpandFact);
        const cropBox = box.crop(scaledBox);
        cache.boxes.push(cropBox);
      }
    }
    */

    // run detection on squared input and no cached boxes
    t.input = fix.padInput(input, inputSize);
    t.res = model?.execute(t.input) as Tensor;
    cache.last = now();
    const res = await t.res.array();
    cache.bodies = (t.res.shape[2] === 17)
      ? await parseSinglePose(res, config, input, [0, 0, 1, 1])
      : await parseMultiPose(res, config, input, [0, 0, 1, 1]);
    for (const body of cache.bodies) {
      fix.rescaleBody(body, [input.shape[2] || 1, input.shape[1] || 1]);
      fix.jitter(body.keypoints);
    }
    Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));

    resolve(cache.bodies);
  });
}
// ===== src/object/nanodet.ts =====
/**
 * NanoDet object detection model implementation
 *
 * Based on: [**MB3-CenterNet**](https://github.com/610265158/mobilenetv3_centernet)
 */

import { log, join, now } from '../util/util';
import * as tf from '../../dist/tfjs.esm.js';
import { labels } from './labels';
import type { ObjectResult, Box } from '../result';
import type { GraphModel, Tensor } from '../tfjs/types';
import type { Config } from '../config';
import { env } from '../util/env';

let model;
let last: Array<ObjectResult> = [];
let lastTime = 0;
let skipped = Number.MAX_SAFE_INTEGER;

const scaleBox = 2.5; // increase box size

export async function load(config: Config): Promise<GraphModel> {
  if (!model || env.initial) {
    model = await tf.loadGraphModel(join(config.modelBasePath, config.object.modelPath || ''));
    const inputs = Object.values(model.modelSignature['inputs']);
    model.inputSize = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : null;
    if (!model.inputSize) throw new Error(`cannot determine model inputSize: ${config.object.modelPath}`);
    if (!model || !model.modelUrl) log('load model failed:', config.object.modelPath);
    else if (config.debug) log('load model:', model.modelUrl);
  } else if (config.debug) log('cached model:', model.modelUrl);
  return model;
}

async function process(res, inputSize, outputShape, config) {
  let id = 0;
  let results: Array<ObjectResult> = [];
  for (const strideSize of [1, 2, 4]) { // try each stride size as it detects large/medium/small objects
    // find scores, boxes, classes
    tf.tidy(async () => { // wrap in tidy to automatically deallocate temp tensors
      const baseSize = strideSize * 13; // 13x13=169, 26x26=676, 52x52=2704
      // find boxes and scores output depending on stride
      const scoresT = res.find((a) => (a.shape[1] === (baseSize ** 2) && a.shape[2] === labels.length))?.squeeze();
      const featuresT = res.find((a) => (a.shape[1] === (baseSize ** 2) && a.shape[2] < labels.length))?.squeeze();
      const boxesMax = featuresT.reshape([-1, 4, featuresT.shape[1] / 4]); // reshape [output] to [4, output / 4] where number is number of different features inside each stride
      const boxIdx = await boxesMax.argMax(2).array(); // what we need is indexes of features with highest scores, not the values themselves
      const scores = await scoresT.array(); // optionally use exponential scores or just as-is
      for (let i = 0; i < scoresT.shape[0]; i++) { // total strides (x * y matrix)
        for (let j = 0; j < scoresT.shape[1]; j++) { // one score for each class
          const score = scores[i][j]; // get score for current position
          if (score > config.object.minConfidence && j !== 61) {
            const cx = (0.5 + Math.trunc(i % baseSize)) / baseSize; // center.x normalized to range 0..1
            const cy = (0.5 + Math.trunc(i / baseSize)) / baseSize; // center.y normalized to range 0..1
            const boxOffset = boxIdx[i].map((a) => a * (baseSize / strideSize / inputSize)); // just grab indexes of features with highest scores
            const [x, y] = [
              cx - (scaleBox / strideSize * boxOffset[0]),
              cy - (scaleBox / strideSize * boxOffset[1]),
            ];
            const [w, h] = [
              cx + (scaleBox / strideSize * boxOffset[2]) - x,
              cy + (scaleBox / strideSize * boxOffset[3]) - y,
            ];
            let boxRaw: Box = [x, y, w, h]; // results normalized to range 0..1
            boxRaw = boxRaw.map((a) => Math.max(0, Math.min(a, 1))) as Box; // fix out-of-bounds coords
            const box = [ // results normalized to input image pixels
              boxRaw[0] * outputShape[0],
              boxRaw[1] * outputShape[1],
              boxRaw[2] * outputShape[0],
              boxRaw[3] * outputShape[1],
            ];
            const result = {
              id: id++,
              // strideSize,
              score: Math.round(100 * score) / 100,
              class: j + 1,
              label: labels[j].label,
              // center: [Math.trunc(outputShape[0] * cx), Math.trunc(outputShape[1] * cy)],
              // centerRaw: [cx, cy],
              box: box.map((a) => Math.trunc(a)) as Box,
              boxRaw,
            };
            results.push(result);
          }
        }
      }
    });
  }
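  // Worked example of the grid math above: for strideSize=1 the grid is baseSize=13, so flat
  // index i=40 maps to cell (col = 40 % 13 = 1, row = trunc(40 / 13) = 3), giving a normalized
  // cell center of cx = (0.5 + 1) / 13 ~= 0.115 and cy = (0.5 + 3) / 13 ~= 0.269; the box is then
  // grown from that center by the per-side feature offsets scaled with scaleBox / strideSize.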
  // deallocate tensors
  res.forEach((t) => tf.dispose(t));

  // normally nms is run on raw results, but since boxes need to be calculated this way we skip calculation of
  // unnecessary boxes and run nms only on good candidates (basically it just does IOU analysis as scores are already filtered)
  const nmsBoxes = results.map((a) => [a.boxRaw[1], a.boxRaw[0], a.boxRaw[3], a.boxRaw[2]]); // switches coordinates from x,y to y,x as expected by tf.nms
  const nmsScores = results.map((a) => a.score);
  let nmsIdx: Array<number> = [];
  if (nmsBoxes && nmsBoxes.length > 0) {
    const nms = await tf.image.nonMaxSuppressionAsync(nmsBoxes, nmsScores, config.object.maxDetected, config.object.iouThreshold, config.object.minConfidence);
    nmsIdx = await nms.data();
    tf.dispose(nms);
  }

  // filter & sort results
  results = results
    .filter((_val, idx) => nmsIdx.includes(idx))
    .sort((a, b) => (b.score - a.score));

  return results;
}

export async function predict(image: Tensor, config: Config): Promise<ObjectResult[]> {
  const skipTime = (config.object.skipTime || 0) > (now() - lastTime);
  const skipFrame = skipped < (config.object.skipFrames || 0);
  if (config.skipAllowed && skipTime && skipFrame && (last.length > 0)) {
    skipped++;
    return last;
  }
  skipped = 0;
  if (!env.kernels.includes('mod') || !env.kernels.includes('sparsetodense')) return last;
  return new Promise(async (resolve) => {
    const outputSize = [image.shape[2], image.shape[1]];
    const resize = tf.image.resizeBilinear(image, [model.inputSize, model.inputSize], false);
    const norm = tf.div(resize, 255);
    const transpose = norm.transpose([0, 3, 1, 2]);
    tf.dispose(norm);
    tf.dispose(resize);

    let objectT;
    if (config.object.enabled) objectT = model.execute(transpose);
    lastTime = now();
    tf.dispose(transpose);

    const obj = await process(objectT, model.inputSize, outputSize, config);
    last = obj;
    resolve(obj);
  });
}

// ===== src/body/posenetutils.ts =====
/**
 * PoseNet body detection model implementation constants
 * See `posenet.ts` for entry point
 */

import type { BodyResult } from '../result';

export const partNames = [
  'nose', 'leftEye', 'rightEye', 'leftEar', 'rightEar', 'leftShoulder',
  'rightShoulder', 'leftElbow', 'rightElbow', 'leftWrist', 'rightWrist',
  'leftHip', 'rightHip', 'leftKnee', 'rightKnee', 'leftAnkle', 'rightAnkle',
];

export const count = partNames.length; // 17 keypoints

export const partIds = partNames.reduce((result, jointName, i) => {
  result[jointName] = i;
  return result;
}, {});

const connectedPartNames = [
  ['leftHip', 'leftShoulder'], ['leftElbow', 'leftShoulder'],
  ['leftElbow', 'leftWrist'], ['leftHip', 'leftKnee'],
  ['leftKnee', 'leftAnkle'], ['rightHip', 'rightShoulder'],
  ['rightElbow', 'rightShoulder'], ['rightElbow', 'rightWrist'],
  ['rightHip', 'rightKnee'], ['rightKnee', 'rightAnkle'],
  ['leftShoulder', 'rightShoulder'], ['leftHip', 'rightHip'],
];
export const connectedPartIndices = connectedPartNames.map(([jointNameA, jointNameB]) => ([partIds[jointNameA], partIds[jointNameB]]));

export const poseChain = [
  ['nose', 'leftEye'], ['leftEye', 'leftEar'], ['nose', 'rightEye'],
  ['rightEye', 'rightEar'], ['nose', 'leftShoulder'],
  ['leftShoulder', 'leftElbow'], ['leftElbow', 'leftWrist'],
  ['leftShoulder', 'leftHip'], ['leftHip', 'leftKnee'],
  ['leftKnee', 'leftAnkle'], ['nose', 'rightShoulder'],
  ['rightShoulder', 'rightElbow'], ['rightElbow', 'rightWrist'],
  ['rightShoulder', 'rightHip'], ['rightHip', 'rightKnee'],
  ['rightKnee', 'rightAnkle'],
];

export function eitherPointDoesntMeetConfidence(a: number, b: number, minConfidence: number) {
  return (a < minConfidence || b < minConfidence);
}

export function getAdjacentKeyPoints(keypoints, minConfidence: number) {
  return connectedPartIndices.reduce((result, [leftJoint, rightJoint]) => {
    if (eitherPointDoesntMeetConfidence(keypoints[leftJoint].score, keypoints[rightJoint].score, minConfidence)) {
      return result;
    }
    result.push([keypoints[leftJoint], keypoints[rightJoint]]);
    return result;
  }, []);
}
export function getBoundingBox(keypoints): [number, number, number, number] {
  const coord = keypoints.reduce(({ maxX, maxY, minX, minY }, { position: { x, y } }) => ({
    maxX: Math.max(maxX, x),
    maxY: Math.max(maxY, y),
    minX: Math.min(minX, x),
    minY: Math.min(minY, y),
  }), {
    maxX: Number.NEGATIVE_INFINITY,
    maxY: Number.NEGATIVE_INFINITY,
    minX: Number.POSITIVE_INFINITY,
    minY: Number.POSITIVE_INFINITY,
  });
  return [coord.minX, coord.minY, coord.maxX - coord.minX, coord.maxY - coord.minY];
}

export function scalePoses(poses, [height, width], [inputResolutionHeight, inputResolutionWidth]): Array<BodyResult> {
  const scaleY = height / inputResolutionHeight;
  const scaleX = width / inputResolutionWidth;
  const scalePose = (pose, i) => ({
    id: i,
    score: pose.score,
    boxRaw: [pose.box[0] / inputResolutionWidth, pose.box[1] / inputResolutionHeight, pose.box[2] / inputResolutionWidth, pose.box[3] / inputResolutionHeight],
    box: [Math.trunc(pose.box[0] * scaleX), Math.trunc(pose.box[1] * scaleY), Math.trunc(pose.box[2] * scaleX), Math.trunc(pose.box[3] * scaleY)],
    keypoints: pose.keypoints.map(({ score, part, position }) => ({
      score,
      part,
      position: [Math.trunc(position.x * scaleX), Math.trunc(position.y * scaleY)],
      positionRaw: [position.x / inputResolutionWidth, position.y / inputResolutionHeight], // normalize to input resolution
    })),
  });
  const scaledPoses = poses.map((pose, i) => scalePose(pose, i));
  return scaledPoses;
}

// algorithm based on Coursera Lecture from Algorithms, Part 1: https://www.coursera.org/learn/algorithms-part1/lecture/ZjoSM/heapsort
export class MaxHeap {
  priorityQueue: Array<unknown>; // don't touch
  numberOfElements: number;
  getElementValue: unknown; // function call

  constructor(maxSize, getElementValue) {
    this.priorityQueue = new Array(maxSize);
    this.numberOfElements = -1;
    this.getElementValue = getElementValue;
  }

  enqueue(x) {
    this.priorityQueue[++this.numberOfElements] = x;
    this.swim(this.numberOfElements);
  }

  dequeue() {
    const max = this.priorityQueue[0];
    this.exchange(0, this.numberOfElements--);
    this.sink(0);
    this.priorityQueue[this.numberOfElements + 1] = null;
    return max;
  }

  empty() { return this.numberOfElements === -1; }

  size() { return this.numberOfElements + 1; }

  all() { return this.priorityQueue.slice(0, this.numberOfElements + 1); }

  max() { return this.priorityQueue[0]; }

  swim(k) {
    while (k > 0 && this.less(Math.floor(k / 2), k)) {
      this.exchange(k, Math.floor(k / 2));
      k = Math.floor(k / 2);
    }
  }

  sink(k) {
    while (2 * k <= this.numberOfElements) {
      let j = 2 * k;
      if (j < this.numberOfElements && this.less(j, j + 1)) j++;
      if (!this.less(k, j)) break;
      this.exchange(k, j);
      k = j;
    }
  }

  getValueAt(i) {
    // @ts-ignore getter is of unknown type
    return this.getElementValue(this.priorityQueue[i]);
  }

  less(i, j) {
    return this.getValueAt(i) < this.getValueAt(j);
  }

  exchange(i, j) {
    const t = this.priorityQueue[i];
    this.priorityQueue[i] = this.priorityQueue[j];
    this.priorityQueue[j] = t;
  }
}

export function getOffsetPoint(y, x, keypoint, offsets) {
  return {
    y: offsets.get(y, x, keypoint),
    x: offsets.get(y, x, keypoint + count),
  };
}

export function getImageCoords(part, outputStride, offsets) {
  const { heatmapY, heatmapX, id: keypoint } = part;
  const { y, x } = getOffsetPoint(heatmapY, heatmapX, keypoint, offsets);
  return {
    x: part.heatmapX * outputStride + x,
    y: part.heatmapY * outputStride + y,
  };
}
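// Illustrative usage of MaxHeap above (hypothetical snippet, not part of the module). The decoder
// seeds it with every candidate part and repeatedly pops the highest-scoring root:
//   const heap = new MaxHeap(8, ({ score }) => score);
//   heap.enqueue({ score: 0.3, part: 'a' });
//   heap.enqueue({ score: 0.9, part: 'b' });
//   heap.enqueue({ score: 0.7, part: 'c' });
//   heap.dequeue(); // -> { score: 0.9, part: 'b' }
//   heap.dequeue(); // -> { score: 0.7, part: 'c' }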
export function fillArray(element, size) {
  const result = new Array(size);
  for (let i = 0; i < size; i++) {
    result[i] = element;
  }
  return result;
}

export function clamp(a, min, max) {
  if (a < min) return min;
  if (a > max) return max;
  return a;
}

export function squaredDistance(y1, x1, y2, x2) {
  const dy = y2 - y1;
  const dx = x2 - x1;
  return dy * dy + dx * dx;
}

export function addVectors(a, b) {
  return { x: a.x + b.x, y: a.y + b.y };
}

export function clampVector(a, min, max) {
  return { y: clamp(a.y, min, max), x: clamp(a.x, min, max) };
}

// ===== src/body/posenet.ts =====
/**
 * PoseNet body detection model implementation
 *
 * Based on: [**PoseNet**](https://medium.com/tensorflow/real-time-human-pose-estimation-in-the-browser-with-tensorflow-js-7dd0bc881cd5)
 */

import { log, join } from '../util/util';
import * as tf from '../../dist/tfjs.esm.js';
import type { BodyResult, Box } from '../result';
import type { Tensor, GraphModel } from '../tfjs/types';
import type { Config } from '../config';
import { env } from '../util/env';
import * as utils from './posenetutils';

let model: GraphModel;
const poseNetOutputs = ['MobilenetV1/offset_2/BiasAdd'/* offsets */, 'MobilenetV1/heatmap_2/BiasAdd'/* heatmapScores */, 'MobilenetV1/displacement_fwd_2/BiasAdd'/* displacementFwd */, 'MobilenetV1/displacement_bwd_2/BiasAdd'/* displacementBwd */];

const localMaximumRadius = 1;
const outputStride = 16;
const squaredNmsRadius = 50 ** 2;

function traverse(edgeId, sourceKeypoint, targetId, scores, offsets, displacements, offsetRefineStep = 2) {
  const getDisplacement = (point) => ({
    y: displacements.get(point.y, point.x, edgeId),
    x: displacements.get(point.y, point.x, (displacements.shape[2] / 2) + edgeId),
  });
  const getStridedIndexNearPoint = (point, height, width) => ({
    y: utils.clamp(Math.round(point.y / outputStride), 0, height - 1),
    x: utils.clamp(Math.round(point.x / outputStride), 0, width - 1),
  });

  const [height, width] = scores.shape;
  // Nearest neighbor interpolation for the source->target displacements.
  const sourceKeypointIndices = getStridedIndexNearPoint(sourceKeypoint.position, height, width);
  const displacement = getDisplacement(sourceKeypointIndices);
  const displacedPoint = utils.addVectors(sourceKeypoint.position, displacement);
  let targetKeypoint = displacedPoint;
  for (let i = 0; i < offsetRefineStep; i++) {
    const targetKeypointIndices = getStridedIndexNearPoint(targetKeypoint, height, width);
    const offsetPoint = utils.getOffsetPoint(targetKeypointIndices.y, targetKeypointIndices.x, targetId, offsets);
    targetKeypoint = utils.addVectors(
      { x: targetKeypointIndices.x * outputStride, y: targetKeypointIndices.y * outputStride },
      { x: offsetPoint.x, y: offsetPoint.y },
    );
  }
  const targetKeyPointIndices = getStridedIndexNearPoint(targetKeypoint, height, width);
  const score = scores.get(targetKeyPointIndices.y, targetKeyPointIndices.x, targetId);
  return { position: targetKeypoint, part: utils.partNames[targetId], score };
}
export function decodePose(root, scores, offsets, displacementsFwd, displacementsBwd) {
  const tuples = utils.poseChain.map(([parentJoinName, childJoinName]) => ([utils.partIds[parentJoinName], utils.partIds[childJoinName]]));
  const edgesFwd = tuples.map(([, childJointId]) => childJointId);
  const edgesBwd = tuples.map(([parentJointId]) => parentJointId);
  const numParts = scores.shape[2]; // [21,21,17]
  const numEdges = edgesFwd.length;
  const keypoints = new Array(numParts);
  // Start a new detection instance at the position of the root.
  const rootPoint = utils.getImageCoords(root.part, outputStride, offsets);
  keypoints[root.part.id] = {
    score: root.score,
    part: utils.partNames[root.part.id],
    position: rootPoint,
  };
  // Decode the part positions upwards in the tree, following the backward displacements.
  for (let edge = numEdges - 1; edge >= 0; --edge) {
    const sourceId = edgesFwd[edge];
    const targetId = edgesBwd[edge];
    if (keypoints[sourceId] && !keypoints[targetId]) {
      keypoints[targetId] = traverse(edge, keypoints[sourceId], targetId, scores, offsets, displacementsBwd);
    }
  }
  // Decode the part positions downwards in the tree, following the forward displacements.
  for (let edge = 0; edge < numEdges; ++edge) {
    const sourceId = edgesBwd[edge];
    const targetId = edgesFwd[edge];
    if (keypoints[sourceId] && !keypoints[targetId]) {
      keypoints[targetId] = traverse(edge, keypoints[sourceId], targetId, scores, offsets, displacementsFwd);
    }
  }
  return keypoints;
}

function scoreIsMaximumInLocalWindow(keypointId, score, heatmapY, heatmapX, scores) {
  const [height, width] = scores.shape;
  let localMaximum = true;
  const yStart = Math.max(heatmapY - localMaximumRadius, 0);
  const yEnd = Math.min(heatmapY + localMaximumRadius + 1, height);
  for (let yCurrent = yStart; yCurrent < yEnd; ++yCurrent) {
    const xStart = Math.max(heatmapX - localMaximumRadius, 0);
    const xEnd = Math.min(heatmapX + localMaximumRadius + 1, width);
    for (let xCurrent = xStart; xCurrent < xEnd; ++xCurrent) {
      if (scores.get(yCurrent, xCurrent, keypointId) > score) {
        localMaximum = false;
        break;
      }
    }
    if (!localMaximum) break;
  }
  return localMaximum;
}

export function buildPartWithScoreQueue(minConfidence, scores) {
  const [height, width, numKeypoints] = scores.shape;
  const queue = new utils.MaxHeap(height * width * numKeypoints, ({ score }) => score);
  for (let heatmapY = 0; heatmapY < height; ++heatmapY) {
    for (let heatmapX = 0; heatmapX < width; ++heatmapX) {
      for (let keypointId = 0; keypointId < numKeypoints; ++keypointId) {
        const score = scores.get(heatmapY, heatmapX, keypointId);
        // Only consider parts with score greater or equal to threshold as root candidates.
        if (score < minConfidence) continue;
        // Only consider keypoints whose score is maximum in a local window.
        if (scoreIsMaximumInLocalWindow(keypointId, score, heatmapY, heatmapX, scores)) queue.enqueue({ score, part: { heatmapY, heatmapX, id: keypointId } });
      }
    }
  }
  return queue;
}

function withinRadius(poses, { x, y }, keypointId) {
  return poses.some(({ keypoints }) => {
    const correspondingKeypoint = keypoints[keypointId]?.position;
    if (!correspondingKeypoint) return false;
    return utils.squaredDistance(y, x, correspondingKeypoint.y, correspondingKeypoint.x) <= squaredNmsRadius;
  });
}

function getInstanceScore(existingPoses, keypoints) {
  const notOverlappedKeypointScores = keypoints.reduce((result, { position, score }, keypointId) => {
    if (!withinRadius(existingPoses, position, keypointId)) result += score;
    return result;
  }, 0.0);
  return notOverlappedKeypointScores / keypoints.length;
}
export function decode(offsets, scores, displacementsFwd, displacementsBwd, maxDetected, minConfidence) {
  const poses: Array<{ keypoints, box: Box, score: number }> = [];
  const queue = buildPartWithScoreQueue(minConfidence, scores);
  // Generate at most maxDetected object instances per image in decreasing root part score order.
  while (poses.length < maxDetected && !queue.empty()) {
    // The top element in the queue is the next root candidate.
    const root = queue.dequeue();
    // Part-based non-maximum suppression: We reject a root candidate if it is within a disk of `nmsRadius` pixels from the corresponding part of a previously detected instance.
    // @ts-ignore this one is tree walk
    const rootImageCoords = utils.getImageCoords(root.part, outputStride, offsets);
    // @ts-ignore this one is tree walk
    if (withinRadius(poses, rootImageCoords, root.part.id)) continue;
    // Else start a new detection instance at the position of the root.
    let keypoints = decodePose(root, scores, offsets, displacementsFwd, displacementsBwd);
    keypoints = keypoints.filter((a) => a.score > minConfidence);
    const score = getInstanceScore(poses, keypoints);
    const box = utils.getBoundingBox(keypoints);
    if (score > minConfidence) poses.push({ keypoints, box, score: Math.round(100 * score) / 100 });
  }
  return poses;
}

export async function predict(input: Tensor, config: Config): Promise<BodyResult[]> {
  /** posenet is mostly obsolete
   * caching is not implemented
   */
  const res = tf.tidy(() => {
    if (!model.inputs[0].shape) return [];
    const resized = tf.image.resizeBilinear(input, [model.inputs[0].shape[2], model.inputs[0].shape[1]]);
    const normalized = tf.sub(tf.div(tf.cast(resized, 'float32'), 127.5), 1.0);
    const results: Array<Tensor> = model.execute(normalized, poseNetOutputs) as Array<Tensor>;
    const results3d = results.map((y) => tf.squeeze(y, [0]));
    results3d[1] = results3d[1].sigmoid(); // apply sigmoid on scores
    return results3d;
  });

  const buffers = await Promise.all(res.map((tensor: Tensor) => tensor.buffer()));
  for (const t of res) tf.dispose(t);

  const decoded = await decode(buffers[0], buffers[1], buffers[2], buffers[3], config.body.maxDetected, config.body.minConfidence);
  if (!model.inputs[0].shape) return [];
  const scaled = utils.scalePoses(decoded, [input.shape[1], input.shape[2]], [model.inputs[0].shape[2], model.inputs[0].shape[1]]) as BodyResult[];
  return scaled;
}

export async function load(config: Config): Promise<GraphModel> {
  if (!model || env.initial) {
    model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath || '')) as unknown as GraphModel;
    if (!model || !model['modelUrl']) log('load model failed:', config.body.modelPath);
    else if (config.debug) log('load model:', model['modelUrl']);
  } else if (config.debug) log('cached model:', model['modelUrl']);
  return model;
}
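// Descriptive summary of the decoder above: decode() is a greedy multi-person algorithm:
// (1) collect local-maximum heatmap cells as root candidates in a max-heap, (2) pop the best
// root and skip it if an already-decoded pose claims that part within the 50px NMS radius,
// (3) otherwise grow a full skeleton from the root by walking poseChain edges with the
// displacement maps, and (4) score the new pose counting only keypoints that do not overlap
// previously found poses.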
// ===== src/segmentation/segmentation.ts =====
/**
 * Image segmentation for body detection model
 *
 * Based on:
 * - [**MediaPipe Meet**](https://drive.google.com/file/d/1lnP1bRi9CSqQQXUHa13159vLELYDgDu0/preview)
 * - [**MediaPipe Selfie**](https://drive.google.com/file/d/1dCfozqknMa068vVsO2j_1FgZkW_e3VWv/preview)
 */

import { log, join } from '../util/util';
import * as tf from '../../dist/tfjs.esm.js';
import * as image from '../image/image';
import type { GraphModel, Tensor } from '../tfjs/types';
import type { Config } from '../config';
import { env } from '../util/env';
import type { Input } from '../exports';

let model: GraphModel;
let busy = false;

export async function load(config: Config): Promise<GraphModel> {
  if (!model || env.initial) {
    model = await tf.loadGraphModel(join(config.modelBasePath, config.segmentation.modelPath || '')) as unknown as GraphModel;
    if (!model || !model['modelUrl']) log('load model failed:', config.segmentation.modelPath);
    else if (config.debug) log('load model:', model['modelUrl']);
  } else if (config.debug) log('cached model:', model['modelUrl']);
  return model;
}

export async function process(input: Input, background: Input | undefined, config: Config)
: Promise<{ data: Array<number>, canvas: HTMLCanvasElement | OffscreenCanvas | null, alpha: HTMLCanvasElement | OffscreenCanvas | null }> {
  if (busy) return { data: [], canvas: null, alpha: null };
  busy = true;
  if (!model) await load(config);
  const inputImage = image.process(input, config);
  const width = inputImage.canvas?.width || 0;
  const height = inputImage.canvas?.height || 0;
  if (!inputImage.tensor) return { data: [], canvas: null, alpha: null };
  const t: Record<string, Tensor> = {};

  t.resize = tf.image.resizeBilinear(inputImage.tensor, [model.inputs[0].shape ? model.inputs[0].shape[1] : 0, model.inputs[0].shape ? model.inputs[0].shape[2] : 0], false);
  tf.dispose(inputImage.tensor);
  t.norm = tf.div(t.resize, 255);
  t.res = model.execute(t.norm) as Tensor;

  t.squeeze = tf.squeeze(t.res, 0); // meet.shape:[1,256,256,1], selfie.shape:[1,144,256,2]
  if (t.squeeze.shape[2] === 2) {
    t.softmax = tf.softmax(t.squeeze); // model meet has two channels for fg and bg
    [t.bg, t.fg] = tf.unstack(t.softmax, 2);
    t.expand = tf.expandDims(t.fg, 2);
    t.pad = tf.expandDims(t.expand, 0);
    t.crop = tf.image.cropAndResize(t.pad, [[0, 0, 0.5, 0.5]], [0], [width, height]);
    // running softmax before unstack creates a 2x2 matrix so we only take the upper-left quadrant
    // otherwise run softmax after unstack and use standard resize
    // resizeOutput = tf.image.resizeBilinear(expand, [input.tensor?.shape[1], input.tensor?.shape[2]]);
    t.data = tf.squeeze(t.crop, 0);
  } else {
    t.data = tf.image.resizeBilinear(t.squeeze, [height, width]); // model selfie has a single channel that we can use directly
  }
  const data = Array.from(await t.data.data()) as number[];

  if (env.node && !env.Canvas && (typeof ImageData === 'undefined')) {
    if (config.debug) log('canvas support missing');
    Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));
    return { data, canvas: null, alpha: null }; // running in nodejs so return alpha array as-is
  }

  const alphaCanvas = image.canvas(width, height);
  await tf.browser.toPixels(t.data, alphaCanvas);
  const alphaCtx = alphaCanvas.getContext('2d') as CanvasRenderingContext2D;
  if (config.segmentation.blur && config.segmentation.blur > 0) alphaCtx.filter = `blur(${config.segmentation.blur}px)`; // use css filter for blurring, could be done with manual gaussian blur instead
  const alphaData = alphaCtx.getImageData(0, 0, width, height);

  const compositeCanvas = image.canvas(width, height);
  const compositeCtx = compositeCanvas.getContext('2d') as CanvasRenderingContext2D;
  if (inputImage.canvas) compositeCtx.drawImage(inputImage.canvas, 0, 0);
  compositeCtx.globalCompositeOperation = 'darken'; // https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D/globalCompositeOperation // best options are: darken, color-burn, multiply
  if (config.segmentation.blur && config.segmentation.blur > 0) compositeCtx.filter = `blur(${config.segmentation.blur}px)`; // use css filter for blurring, could be done with manual gaussian blur instead
  compositeCtx.drawImage(alphaCanvas, 0, 0);
  compositeCtx.globalCompositeOperation = 'source-over'; // reset composite operation
  compositeCtx.filter = 'none'; // reset css filter
  const compositeData = compositeCtx.getImageData(0, 0, width, height);
  for (let i = 0; i < width * height; i++) compositeData.data[4 * i + 3] = alphaData.data[4 * i + 0]; // copy original alpha value to new composite canvas
  compositeCtx.putImageData(compositeData, 0, 0);

  let mergedCanvas: HTMLCanvasElement | OffscreenCanvas | null = null;
  if (background && compositeCanvas) { // draw background with segmentation as overlay if background is present
    mergedCanvas = image.canvas(width, height);
    const bgImage = image.process(background, config);
    tf.dispose(bgImage.tensor);
    const ctxMerge = mergedCanvas.getContext('2d') as CanvasRenderingContext2D;
    ctxMerge.drawImage(bgImage.canvas as HTMLCanvasElement, 0, 0, mergedCanvas.width, mergedCanvas.height);
    ctxMerge.drawImage(compositeCanvas, 0, 0);
  }

  Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));
  busy = false;

  return { data, canvas: mergedCanvas || compositeCanvas, alpha: alphaCanvas };
}

// ===== src/models.ts =====
/**
 * Loader and Validator for all models used by Human
 */

import { env } from './util/env';
import { log } from './util/util';
import * as agegenderrace from './gear/gear-agegenderrace';
import * as antispoof from './face/antispoof';
import * as blazeface from './face/blazeface';
import * as blazepose from './body/blazepose';
import * as centernet from './object/centernet';
import * as efficientpose from './body/efficientpose';
import * as emotion from './gear/emotion';
import * as facemesh from './face/facemesh';
import * as faceres from './face/faceres';
import * as handpose from './hand/handpose';
import * as handtrack from './hand/handtrack';
import * as iris from './face/iris';
import * as movenet from './body/movenet';
import * as nanodet from './object/nanodet';
import * as posenet from './body/posenet';
import * as segmentation from './segmentation/segmentation';
import type { GraphModel } from './tfjs/types';
import type { Human } from './human';

/** Instances of all possible TFJS Graph Models used by Human
 * - loaded as needed based on configuration
 * - initialized explicitly with `human.load()` method
 * - initialized implicitly on first call to `human.detect()`
 * - each model can be `null` if not loaded, instance of `GraphModel` if loaded or `Promise` if loading
 */
export class Models {
  age: null | GraphModel | Promise<GraphModel> = null;
  agegenderrace: null | GraphModel | Promise<GraphModel> = null;
  blazeposedetect: null | GraphModel | Promise<GraphModel> = null;
  blazepose: null | GraphModel | Promise<GraphModel> = null;
  centernet: null | GraphModel | Promise<GraphModel> = null;
  efficientpose: null | GraphModel | Promise<GraphModel> = null;
  embedding: null | GraphModel | Promise<GraphModel> = null;
  emotion: null | GraphModel | Promise<GraphModel> = null;
  facedetect: null | GraphModel | Promise<GraphModel> = null;
  faceiris: null | GraphModel | Promise<GraphModel> = null;
  facemesh: null | GraphModel | Promise<GraphModel> = null;
  faceres: null | GraphModel | Promise<GraphModel> = null;
  gender: null | GraphModel | Promise<GraphModel> = null;
  handpose: null | GraphModel | Promise<GraphModel> = null;
  handskeleton: null | GraphModel | Promise<GraphModel> = null;
  handtrack: null | GraphModel | Promise<GraphModel> = null;
  movenet: null | GraphModel | Promise<GraphModel> = null;
  nanodet: null | GraphModel | Promise<GraphModel> = null;
  posenet: null | GraphModel | Promise<GraphModel> = null;
  segmentation: null | GraphModel | Promise<GraphModel> = null;
  antispoof: null | GraphModel | Promise<GraphModel> = null;
}

export function reset(instance: Human): void {
  // if (instance.config.debug) log('resetting loaded models');
  for (const model of Object.keys(instance.models)) instance.models[model] = null;
}
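// Illustrative usage note only (assumes a configured Human instance named `human`): models load
// lazily on first detect(), so an explicit preload keeps first-frame latency predictable:
//   await human.load();                      // resolves every model enabled in human.config
//   const result = await human.detect(input);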
/** Load method preloads all instance.configured models on-demand */
export async function load(instance: Human): Promise<void> {
  if (env.initial) reset(instance);
  if (instance.config.hand.enabled) { // handpose model is a combo that must be loaded as a whole
    if (!instance.models.handpose && instance.config.hand.detector?.modelPath?.includes('handdetect')) [instance.models.handpose, instance.models.handskeleton] = await handpose.load(instance.config);
    if (!instance.models.handskeleton && instance.config.hand.landmarks && instance.config.hand.detector?.modelPath?.includes('handdetect')) [instance.models.handpose, instance.models.handskeleton] = await handpose.load(instance.config);
  }
  if (instance.config.face.enabled && !instance.models.facedetect) instance.models.facedetect = blazeface.load(instance.config);
  if (instance.config.face.enabled && instance.config.face.mesh?.enabled && !instance.models.facemesh) instance.models.facemesh = facemesh.load(instance.config);
  if (instance.config.face.enabled && instance.config.face.iris?.enabled && !instance.models.faceiris) instance.models.faceiris = iris.load(instance.config);
  if (instance.config.face.enabled && instance.config.face.antispoof?.enabled && !instance.models.antispoof) instance.models.antispoof = antispoof.load(instance.config);
  if (instance.config.hand.enabled && !instance.models.handtrack && instance.config.hand.detector?.modelPath?.includes('handtrack')) instance.models.handtrack = handtrack.loadDetect(instance.config);
  if (instance.config.hand.enabled && instance.config.hand.landmarks && !instance.models.handskeleton && instance.config.hand.detector?.modelPath?.includes('handtrack')) instance.models.handskeleton = handtrack.loadSkeleton(instance.config);
  if (instance.config.body.enabled && !instance.models.posenet && instance.config.body?.modelPath?.includes('posenet')) instance.models.posenet = posenet.load(instance.config);
  if (instance.config.body.enabled && !instance.models.efficientpose && instance.config.body?.modelPath?.includes('efficientpose')) instance.models.efficientpose = efficientpose.load(instance.config);
  if (instance.config.body.enabled && !instance.models.blazepose && instance.config.body?.modelPath?.includes('blazepose')) instance.models.blazepose = blazepose.loadPose(instance.config);
  if (instance.config.body.enabled && !instance.models.blazeposedetect && instance.config.body.detector?.modelPath && instance.config.body?.modelPath?.includes('blazepose')) instance.models.blazeposedetect = blazepose.loadDetect(instance.config);
  if (instance.config.body.enabled && !instance.models.movenet && instance.config.body?.modelPath?.includes('movenet')) instance.models.movenet = movenet.load(instance.config);
  if (instance.config.object.enabled && !instance.models.nanodet && instance.config.object?.modelPath?.includes('nanodet')) instance.models.nanodet = nanodet.load(instance.config);
  if (instance.config.object.enabled && !instance.models.centernet && instance.config.object?.modelPath?.includes('centernet')) instance.models.centernet = centernet.load(instance.config);
  if (instance.config.face.enabled && instance.config.face.emotion?.enabled && !instance.models.emotion) instance.models.emotion = emotion.load(instance.config);
  if (instance.config.face.enabled && instance.config.face.description?.enabled && !instance.models.faceres) instance.models.faceres = faceres.load(instance.config);
  if (instance.config.segmentation.enabled && !instance.models.segmentation) instance.models.segmentation = segmentation.load(instance.config);
  if (instance.config.face.enabled && instance.config.face['agegenderrace']?.enabled && !instance.models.agegenderrace) instance.models.agegenderrace = agegenderrace.load(instance.config);

  // models are loaded in parallel asynchronously so let's wait until they are actually loaded
  for await (const model of Object.keys(instance.models)) {
    if (instance.models[model] && typeof instance.models[model] !== 'undefined') instance.models[model] = await instance.models[model];
  }
}

export async function validate(instance: Human): Promise<void> {
  interface Op { name: string, category: string, op: string }
  const simpleOps = ['const', 'placeholder', 'noop', 'pad', 'squeeze', 'add', 'sub', 'mul', 'div'];
  for (const defined of Object.keys(instance.models)) {
    if (instance.models[defined]) { // check if model is loaded
      let models: GraphModel[] = [];
      if (Array.isArray(instance.models[defined])) {
        models = instance.models[defined]
          .filter((model) => (model !== null))
          .map((model) => ((model && model.executor) ? model : model.model));
      } else {
        models = [instance.models[defined]];
      }
      for (const model of models) {
        if (!model) {
          if (instance.config.debug) log('model marked as loaded but not defined:', defined);
          continue;
        }
        const ops: string[] = [];
        // @ts-ignore // executor is a private method
        const executor = model?.executor;
        if (executor && executor.graph.nodes) {
          for (const kernel of Object.values(executor.graph.nodes)) {
            const op = (kernel as Op).op.toLowerCase();
            if (!ops.includes(op)) ops.push(op);
          }
        } else {
          if (!executor && instance.config.debug) log('model signature not determined:', defined);
        }
        const missing: string[] = [];
        for (const op of ops) {
          if (!simpleOps.includes(op) // exclude simple ops
            && !instance.env.kernels.includes(op) // check actual kernel ops
            && !instance.env.kernels.includes(op.replace('_', '')) // check variation without _
            && !instance.env.kernels.includes(op.replace('native', '')) // check standard variation
            && !instance.env.kernels.includes(op.replace('v2', ''))) { // check non-versioned variation
            missing.push(op);
          }
        }
        // log('model validation ops:', defined, ops);
        if (missing.length > 0 && instance.config.debug) log('model validation:', defined, missing);
      }
    }
  }
}

// ===== src/tfjs/humangl.ts =====
/** TFJS custom backend registration */

import { log } from '../util/util';
import * as tf from '../../dist/tfjs.esm.js';
import * as image from '../image/image';
import * as models from '../models';
// import { env } from '../env';

export const config = {
  name: 'humangl',
  priority: 999,
  canvas: null,
  gl: null,
  extensions: [],
  webGLattr: { // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.2
    alpha: false,
    antialias: false,
    premultipliedAlpha: false,
    preserveDrawingBuffer: false,
    depth: false,
    stencil: false,
    failIfMajorPerformanceCaveat: false,
    desynchronized: true,
  },
};

function extensions(): void {
  /*
  https://www.khronos.org/registry/webgl/extensions/
  https://webglreport.com/?v=2
  */
  const gl = config.gl;
  if (!gl) return;
  config.extensions = gl.getSupportedExtensions() as string[];
  // gl.getExtension('KHR_parallel_shader_compile');
}

/**
 * Registers custom WebGL2 backend to be used by Human library
 *
 * @returns void
 */
export async function register(instance): Promise<void> {
  // force backend reload if gl context is not valid
  if (instance.config.backend !== 'humangl') return;
  if ((config.name in tf.engine().registry) && (!config.gl || !config.gl.getParameter(config.gl.VERSION))) {
    log('error: humangl backend invalid context');
    models.reset(instance);
    /*
    log('resetting humangl backend');
    await tf.removeBackend(config.name);
    await register(instance); // re-register
    */
  }
  if (!tf.findBackend(config.name)) {
    try {
      config.canvas = await image.canvas(100, 100);
    } catch (err) {
      log('error: cannot create canvas:', err);
      return;
    }
    try {
      config.gl = config.canvas?.getContext('webgl2', config.webGLattr) as WebGL2RenderingContext;
      if (config.canvas) {
        config.canvas.addEventListener('webglcontextlost', async (e) => {
          log('error: humangl:', e.type);
          // log('gpu memory usage:', instance.tf.engine().backendInstance.numBytesInGPU);
          log('possible browser memory leak using webgl or conflict with multiple backend registrations');
          instance.emit('error');
          throw new Error('browser webgl error');
          /*
          log('resetting humangl backend');
          env.initial = true;
          models.reset(instance);
          await tf.removeBackend(config.name);
          await register(instance); // re-register
          */
        });
        config.canvas.addEventListener('webglcontextrestored', (e) => {
          log('error: humangl context restored:', e);
        });
        config.canvas.addEventListener('webglcontextcreationerror', (e) => {
          log('error: humangl context create:', e);
        });
      }
    } catch (err) {
      log('error: cannot get WebGL context:', err);
      return;
    }
    try {
      tf.setWebGLContext(2, config.gl);
    } catch (err) {
      log('error: cannot set WebGL context:', err);
      return;
    }
    try {
      const ctx = new tf.GPGPUContext(config.gl);
      tf.registerBackend(config.name, () => new tf.MathBackendWebGL(ctx), config.priority);
    } catch (err) {
      log('error: cannot register WebGL backend:', err);
      return;
    }
    try {
      const kernels = tf.getKernelsForBackend('webgl');
      kernels.forEach((kernelConfig) => {
        const newKernelConfig = { ...kernelConfig, backendName: config.name };
        tf.registerKernel(newKernelConfig);
      });
    } catch (err) {
      log('error: cannot update WebGL backend registration:', err);
      return;
    }
    const current = tf.backend().getGPGPUContext ? tf.backend().getGPGPUContext().gl : null;
    if (current) {
      log(`humangl webgl version:${current.getParameter(current.VERSION)} renderer:${current.getParameter(current.RENDERER)}`);
    } else {
      log('error: no current gl context:', current, config.gl);
      return;
    }
    try {
      tf.ENV.set('WEBGL_VERSION', 2);
    } catch (err) {
      log('error: cannot set WebGL backend flags:', err);
      return;
    }
    extensions();
    log('backend registered:', config.name);
  }
}
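// Design note on the registration above: TFJS resolves kernels by (opName, backendName) pairs,
// so a backend registered under a new name starts with zero kernels even though it wraps the
// stock WebGL implementation. Cloning every 'webgl' kernel config with backendName: 'humangl'
// is what makes the custom backend fully operational while still owning its own canvas and
// GL context.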
// ===== src/tfjs/backend.ts =====
/** TFJS backend initialization and customization */

import { log, now } from '../util/util';
import { env } from '../util/env';
import * as humangl from './humangl';
import * as tf from '../../dist/tfjs.esm.js';

function registerCustomOps() {
  if (!env.kernels.includes('mod')) {
    const kernelMod = {
      kernelName: 'Mod',
      backendName: tf.getBackend(),
      kernelFunc: (op) => tf.tidy(() => tf.sub(op.inputs.a, tf.mul(tf.div(op.inputs.a, op.inputs.b), op.inputs.b))),
    };
    tf.registerKernel(kernelMod);
  }
  if (!env.kernels.includes('floormod')) {
    const kernelMod = {
      kernelName: 'FloorMod',
      backendName: tf.getBackend(),
      kernelFunc: (op) => tf.tidy(() => tf.add(tf.mul(tf.floorDiv(op.inputs.a, op.inputs.b), op.inputs.b), tf.mod(op.inputs.a, op.inputs.b))), // floorDiv(a, b) * b + mod(a, b)
    };
    tf.registerKernel(kernelMod);
  }
  env.updateBackend();
}

export async function check(instance, force = false) {
  instance.state = 'backend';
  if (force || env.initial || (instance.config.backend && (instance.config.backend.length > 0) && (tf.getBackend() !== instance.config.backend))) {
    const timeStamp = now();

    if (instance.config.backend && instance.config.backend.length > 0) {
      // detect web worker
      // @ts-ignore ignore missing type for WorkerGlobalScope as that is the point
      if (typeof window === 'undefined' && typeof WorkerGlobalScope !== 'undefined' && instance.config.debug) {
        if (instance.config.debug) log('running inside web worker');
      }

      // force browser vs node backend
      if (env.browser && instance.config.backend === 'tensorflow') {
        if (instance.config.debug) log('override: backend set to tensorflow while running in browser');
        instance.config.backend = 'humangl';
      }
      if (env.node && (instance.config.backend === 'webgl' || instance.config.backend === 'humangl')) {
        if (instance.config.debug) log(`override: backend set to ${instance.config.backend} while running in nodejs`);
        instance.config.backend = 'tensorflow';
      }

      // handle webgpu
      if (env.browser && instance.config.backend === 'webgpu') {
        if (typeof navigator === 'undefined' || typeof navigator['gpu'] === 'undefined') {
          log('override: backend set to webgpu but browser does not support webgpu');
          instance.config.backend = 'humangl';
        } else {
          const adapter = await navigator['gpu'].requestAdapter();
          if (instance.config.debug) log('enumerated webgpu adapter:', adapter);
        }
      }

      // check available backends
      if (instance.config.backend === 'humangl') await humangl.register(instance);
      const available = Object.keys(tf.engine().registryFactory);
      if (instance.config.debug) log('available backends:', available);

      if (!available.includes(instance.config.backend)) {
        log(`error: backend ${instance.config.backend} not found in registry`);
        instance.config.backend = env.node ? 'tensorflow' : 'webgl';
        if (instance.config.debug) log(`override: setting backend ${instance.config.backend}`);
      }

      if (instance.config.debug) log('setting backend:', instance.config.backend);

      // customize wasm
      if (instance.config.backend === 'wasm') {
        if (instance.config.debug) log('wasm path:', instance.config.wasmPath);
        if (typeof tf?.setWasmPaths !== 'undefined') await tf.setWasmPaths(instance.config.wasmPath);
        else throw new Error('wasm backend is not loaded');
        const simd = await tf.env().getAsync('WASM_HAS_SIMD_SUPPORT');
        const mt = await tf.env().getAsync('WASM_HAS_MULTITHREAD_SUPPORT');
        if (instance.config.debug) log(`wasm execution: ${simd ? 'SIMD' : 'no SIMD'} ${mt ? 'multithreaded' : 'singlethreaded'}`);
        if (instance.config.debug && !simd) log('warning: wasm simd support is not enabled');
      }

      try {
        await tf.setBackend(instance.config.backend);
        await tf.ready();
      } catch (err) {
        log('error: cannot set backend:', instance.config.backend, err);
        return false;
      }
    }

    // customize humangl
    if (tf.getBackend() === 'humangl') {
      tf.ENV.set('CHECK_COMPUTATION_FOR_ERRORS', false);
      tf.ENV.set('WEBGL_CPU_FORWARD', true);
      // tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', false);
      tf.ENV.set('WEBGL_USE_SHAPES_UNIFORMS', true);
      tf.ENV.set('CPU_HANDOFF_SIZE_THRESHOLD', 256);
      // if (!instance.config.object.enabled) tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true); // safe to use 16bit precision
      if (typeof instance.config['deallocate'] !== 'undefined' && instance.config['deallocate']) { // hidden param
        log('changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:', true);
        tf.ENV.set('WEBGL_DELETE_TEXTURE_THRESHOLD', 0);
      }
      if (tf.backend().getGPGPUContext) {
        const gl = await tf.backend().getGPGPUContext().gl;
        if (instance.config.debug) log(`gl version:${gl.getParameter(gl.VERSION)} renderer:${gl.getParameter(gl.RENDERER)}`);
      }
    }

    // customize webgpu
    if (tf.getBackend() === 'webgpu') {
      // tf.ENV.set('WEBGPU_CPU_HANDOFF_SIZE_THRESHOLD', 512);
      // tf.ENV.set('WEBGPU_DEFERRED_SUBMIT_BATCH_SIZE', 0);
      // tf.ENV.set('WEBGPU_CPU_FORWARD', true);
    }

    // wait for ready
    tf.enableProdMode();
    await tf.ready();

    instance.performance.initBackend = Math.trunc(now() - timeStamp);
    instance.config.backend = tf.getBackend();

    env.updateBackend(); // update env on backend init
    registerCustomOps();
  }
  return true;
}

// register fake missing tfjs ops
export function fakeOps(kernelNames: Array<string>, config) {
  // if (config.debug) log('registerKernel:', kernelNames);
  for (const kernelName of kernelNames) {
    const kernelConfig = {
      kernelName,
      backendName: config.backend,
      kernelFunc: () => { if (config.debug) log('kernelFunc', kernelName, config.backend); },
      // setupFunc: () => { if (config.debug) log('kernelFunc', kernelName, config.backend); },
      // disposeFunc: () => { if (config.debug) log('kernelFunc', kernelName, config.backend); },
    };
    tf.registerKernel(kernelConfig);
  }
  env.kernels = tf.getKernelsForBackend(tf.getBackend()).map((kernel) => kernel.kernelName.toLowerCase()); // re-scan registered ops
}
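// Illustrative usage of fakeOps() above: it registers no-op kernels so that graph models
// referencing ops a backend lacks can still be loaded and executed. For example, the MoveNet
// loader (src/body/movenet.ts, shown earlier) stubs out the 'size' op before loadGraphModel:
//   fakeOps(['size'], config); // any graph node hitting this op becomes a logged no-op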
"/**\n * Module that implements helper draw functions, exposed as human.draw\n */\n\nimport { TRI468 as triangulation } from '../face/facemeshcoords';\nimport { mergeDeep, now } from './util';\nimport { env } from './env';\nimport type { Result, FaceResult, BodyResult, HandResult, ObjectResult, GestureResult, PersonResult, Point } from '../result';\nimport type { AnyCanvas } from '../exports';\n\n/** Draw Options\n * - Accessed via `human.draw.options` or provided per each draw method as the drawOptions optional parameter\n */\nexport type DrawOptions = {\n /** draw line color */\n color: string,\n /** label color */\n labelColor: string,\n /** label shadow color */\n shadowColor: string,\n /** label font */\n font: string,\n /** line spacing between labels */\n lineHeight: number,\n /** line width for drawn lines */\n lineWidth: number,\n /** size of drawn points */\n pointSize: number,\n /** draw rounded boxes by n pixels */\n roundRect: number,\n /** should points be drawn? */\n drawPoints: boolean,\n /** should labels be drawn? */\n drawLabels: boolean,\n /** should detected gestures be drawn? */\n drawGestures: boolean,\n /** should draw boxes around detection results? */\n drawBoxes: boolean,\n /** should draw polygons from detection points? */\n drawPolygons: boolean,\n /** should draw gaze arrows? */\n drawGaze: boolean,\n /** should fill polygons? */\n fillPolygons: boolean,\n /** use z-coordinate when available */\n useDepth: boolean,\n /** should lines be curved? */\n useCurves: boolean,\n}\n\nexport const options: DrawOptions = {\n color: 'rgba(173, 216, 230, 0.6)', // 'lightblue' with light alpha channel\n labelColor: 'rgba(173, 216, 230, 1)', // 'lightblue' with dark alpha channel\n shadowColor: 'black',\n font: 'small-caps 16px "Segoe UI"',\n lineHeight: 18,\n lineWidth: 4,\n pointSize: 2,\n roundRect: 8,\n drawPoints: false,\n drawLabels: true,\n drawBoxes: true,\n drawGestures: true,\n drawPolygons: true,\n drawGaze: true,\n fillPolygons: false,\n useDepth: true,\n useCurves: false,\n};\n\nlet drawTime = 0;\n\nconst getCanvasContext = (input) => {\n if (input && input.getContext) return input.getContext('2d');\n throw new Error('invalid canvas');\n};\n\nconst rad2deg = (theta) => Math.round((theta * 180) / Math.PI);\n\nfunction point(ctx: CanvasRenderingContext2D, x, y, z = 0, localOptions) {\n ctx.fillStyle = localOptions.useDepth && z ? `rgba(${127.5 + (2 * z)}, ${127.5 - (2 * z)}, 255, 0.3)` : localOptions.color;\n ctx.beginPath();\n ctx.arc(x, y, localOptions.pointSize, 0, 2 * Math.PI);\n ctx.fill();\n}\n\nfunction rect(ctx: CanvasRenderingContext2D, x, y, width, height, localOptions) {\n ctx.beginPath();\n if (localOptions.useCurves) {\n const cx = (x + x + width) / 2;\n const cy = (y + y + height) / 2;\n ctx.ellipse(cx, cy, width / 2, height / 2, 0, 0, 2 * Math.PI);\n } else {\n ctx.lineWidth = localOptions.lineWidth;\n ctx.moveTo(x + localOptions.roundRect, y);\n ctx.lineTo(x + width - localOptions.roundRect, y);\n ctx.quadraticCurveTo(x + width, y, x + width, y + localOptions.roundRect);\n ctx.lineTo(x + width, y + height - localOptions.roundRect);\n ctx.quadraticCurveTo(x + width, y + height, x + width - localOptions.roundRect, y + height);\n ctx.lineTo(x + localOptions.roundRect, y + height);\n ctx.quadraticCurveTo(x, y + height, x, y + height - localOptions.roundRect);\n ctx.lineTo(x, y + localOptions.roundRect);\n ctx.quadraticCurveTo(x, y, x + localOptions.roundRect, y);\n ctx.closePath();\n }\n ctx.stroke();\n}\n\nfunction lines(ctx: CanvasRenderingContext2D, points: Point[], localOptions) {\n if (points.length < 2) return;\n ctx.beginPath();\n ctx.moveTo(points[0][0], points[0][1]);\n for (const pt of points) {\n const z = pt[2] || 0;\n ctx.strokeStyle = localOptions.useDepth && z !== 0 ? `rgba(${127.5 + (2 * z)}, ${127.5 - (2 * z)}, 255, 0.3)` : localOptions.color;\n ctx.fillStyle = localOptions.useDepth && z !== 0 ? 
`rgba(${127.5 + (2 * z)}, ${127.5 - (2 * z)}, 255, 0.3)` : localOptions.color;\n ctx.lineTo(pt[0], Math.round(pt[1]));\n }\n ctx.stroke();\n if (localOptions.fillPolygons) {\n ctx.closePath();\n ctx.fill();\n }\n}\n\nfunction curves(ctx: CanvasRenderingContext2D, points: Point[], localOptions) {\n if (points.length < 2) return;\n if (!localOptions.useCurves || points.length <= 2) {\n lines(ctx, points, localOptions);\n return;\n }\n ctx.moveTo(points[0][0], points[0][1]);\n for (let i = 0; i < points.length - 2; i++) {\n const xc = (points[i][0] + points[i + 1][0]) / 2;\n const yc = (points[i][1] + points[i + 1][1]) / 2;\n ctx.quadraticCurveTo(points[i][0], points[i][1], xc, yc);\n }\n ctx.quadraticCurveTo(points[points.length - 2][0], points[points.length - 2][1], points[points.length - 1][0], points[points.length - 1][1]);\n ctx.stroke();\n if (localOptions.fillPolygons) {\n ctx.closePath();\n ctx.fill();\n }\n}\n\nfunction arrow(ctx: CanvasRenderingContext2D, from: Point, to: Point, radius = 5) {\n let angle;\n let x;\n let y;\n ctx.beginPath();\n ctx.moveTo(from[0], from[1]);\n ctx.lineTo(to[0], to[1]);\n angle = Math.atan2(to[1] - from[1], to[0] - from[0]);\n x = radius * Math.cos(angle) + to[0];\n y = radius * Math.sin(angle) + to[1];\n ctx.moveTo(x, y);\n angle += (1.0 / 3.0) * (2 * Math.PI);\n x = radius * Math.cos(angle) + to[0];\n y = radius * Math.sin(angle) + to[1];\n ctx.lineTo(x, y);\n angle += (1.0 / 3.0) * (2 * Math.PI);\n x = radius * Math.cos(angle) + to[0];\n y = radius * Math.sin(angle) + to[1];\n ctx.lineTo(x, y);\n ctx.closePath();\n ctx.stroke();\n ctx.fill();\n}\n\n/** draw detected gestures */\nexport async function gesture(inCanvas: AnyCanvas, result: Array, drawOptions?: Partial) {\n const localOptions = mergeDeep(options, drawOptions);\n if (!result || !inCanvas) return;\n if (localOptions.drawGestures) {\n const ctx = getCanvasContext(inCanvas);\n ctx.font = localOptions.font;\n ctx.fillStyle = localOptions.color;\n let i = 1;\n for (let j = 0; j < result.length; j++) {\n let where: unknown[] = []; // what&where is a record\n let what: unknown[] = []; // what&where is a record\n [where, what] = Object.entries(result[j]);\n if ((what.length > 1) && ((what[1] as string).length > 0)) {\n const who = where[1] as number > 0 ? 
`#${where[1]}` : '';\n const label = `${where[0]} ${who}: ${what[1]}`;\n if (localOptions.shadowColor && localOptions.shadowColor !== '') {\n ctx.fillStyle = localOptions.shadowColor;\n ctx.fillText(label, 8, 2 + (i * localOptions.lineHeight));\n }\n ctx.fillStyle = localOptions.labelColor;\n ctx.fillText(label, 6, 0 + (i * localOptions.lineHeight));\n i += 1;\n }\n }\n }\n}\n\n/** draw detected faces */\nexport async function face(inCanvas: AnyCanvas, result: Array, drawOptions?: Partial) {\n const localOptions = mergeDeep(options, drawOptions);\n if (!result || !inCanvas) return;\n const ctx = getCanvasContext(inCanvas);\n for (const f of result) {\n ctx.font = localOptions.font;\n ctx.strokeStyle = localOptions.color;\n ctx.fillStyle = localOptions.color;\n if (localOptions.drawBoxes) rect(ctx, f.box[0], f.box[1], f.box[2], f.box[3], localOptions);\n if (localOptions.drawLabels) {\n // silly hack since fillText does not suport new line\n const labels:string[] = [];\n labels.push(`face: ${Math.trunc(100 * f.score)}%`);\n if (f.genderScore) labels.push(`${f.gender || ''} ${Math.trunc(100 * f.genderScore)}%`);\n if (f.age) labels.push(`age: ${f.age || ''}`);\n if (f.iris) labels.push(`distance: ${f.iris}`);\n if (f.real) labels.push(`real: ${Math.trunc(100 * f.real)}%`);\n if (f.emotion && f.emotion.length > 0) {\n const emotion = f.emotion.map((a) => `${Math.trunc(100 * a.score)}% ${a.emotion}`);\n if (emotion.length > 3) emotion.length = 3;\n labels.push(emotion.join(' '));\n }\n if (f.rotation && f.rotation.angle && f.rotation.gaze) {\n if (f.rotation.angle.roll) labels.push(`roll: ${rad2deg(f.rotation.angle.roll)}\u00B0 yaw:${rad2deg(f.rotation.angle.yaw)}\u00B0 pitch:${rad2deg(f.rotation.angle.pitch)}\u00B0`);\n if (f.rotation.gaze.bearing) labels.push(`gaze: ${rad2deg(f.rotation.gaze.bearing)}\u00B0`);\n }\n if (labels.length === 0) labels.push('face');\n ctx.fillStyle = localOptions.color;\n for (let i = labels.length - 1; i >= 0; i--) {\n const x = Math.max(f.box[0], 0);\n const y = i * localOptions.lineHeight + f.box[1];\n if (localOptions.shadowColor && localOptions.shadowColor !== '') {\n ctx.fillStyle = localOptions.shadowColor;\n ctx.fillText(labels[i], x + 5, y + 16);\n }\n ctx.fillStyle = localOptions.labelColor;\n ctx.fillText(labels[i], x + 4, y + 15);\n }\n }\n ctx.lineWidth = 1;\n if (f.mesh && f.mesh.length > 0) {\n if (localOptions.drawPoints) {\n for (const pt of f.mesh) point(ctx, pt[0], pt[1], pt[2], localOptions);\n }\n if (localOptions.drawPolygons) {\n ctx.lineWidth = 1;\n if (f.mesh.length > 450) {\n for (let i = 0; i < triangulation.length / 3; i++) {\n const points = [\n triangulation[i * 3 + 0],\n triangulation[i * 3 + 1],\n triangulation[i * 3 + 2],\n ].map((index) => f.mesh[index]);\n lines(ctx, points, localOptions);\n }\n }\n // iris: array[center, left, top, right, bottom]\n if (f.annotations && f.annotations['leftEyeIris'] && f.annotations['leftEyeIris'][0]) {\n ctx.strokeStyle = localOptions.useDepth ? 'rgba(255, 200, 255, 0.3)' : localOptions.color;\n ctx.beginPath();\n const sizeX = Math.abs(f.annotations['leftEyeIris'][3][0] - f.annotations['leftEyeIris'][1][0]) / 2;\n const sizeY = Math.abs(f.annotations['leftEyeIris'][4][1] - f.annotations['leftEyeIris'][2][1]) / 2;\n ctx.ellipse(f.annotations['leftEyeIris'][0][0], f.annotations['leftEyeIris'][0][1], sizeX, sizeY, 0, 0, 2 * Math.PI);\n ctx.stroke();\n if (localOptions.fillPolygons) {\n ctx.fillStyle = localOptions.useDepth ? 
'rgba(255, 255, 200, 0.3)' : localOptions.color;\n ctx.fill();\n }\n }\n if (f.annotations && f.annotations['rightEyeIris'] && f.annotations['rightEyeIris'][0]) {\n ctx.strokeStyle = localOptions.useDepth ? 'rgba(255, 200, 255, 0.3)' : localOptions.color;\n ctx.beginPath();\n const sizeX = Math.abs(f.annotations['rightEyeIris'][3][0] - f.annotations['rightEyeIris'][1][0]) / 2;\n const sizeY = Math.abs(f.annotations['rightEyeIris'][4][1] - f.annotations['rightEyeIris'][2][1]) / 2;\n ctx.ellipse(f.annotations['rightEyeIris'][0][0], f.annotations['rightEyeIris'][0][1], sizeX, sizeY, 0, 0, 2 * Math.PI);\n ctx.stroke();\n if (localOptions.fillPolygons) {\n ctx.fillStyle = localOptions.useDepth ? 'rgba(255, 255, 200, 0.3)' : localOptions.color;\n ctx.fill();\n }\n }\n if (localOptions.drawGaze && f.rotation?.angle) {\n ctx.strokeStyle = 'pink';\n const valX = (f.box[0] + f.box[2] / 2) - (f.box[3] * rad2deg(f.rotation.angle.yaw) / 90);\n const valY = (f.box[1] + f.box[3] / 2) + (f.box[2] * rad2deg(f.rotation.angle.pitch) / 90);\n const pathV = new Path2D(`\n M ${f.box[0] + f.box[2] / 2} ${f.box[1]}\n C\n ${valX} ${f.box[1]},\n ${valX} ${f.box[1] + f.box[3]},\n ${f.box[0] + f.box[2] / 2} ${f.box[1] + f.box[3]}\n `);\n const pathH = new Path2D(`\n M ${f.box[0]} ${f.box[1] + f.box[3] / 2}\n C \n ${f.box[0]} ${valY},\n ${f.box[0] + f.box[2]} ${valY},\n ${f.box[0] + f.box[2]} ${f.box[1] + f.box[3] / 2}\n `);\n ctx.stroke(pathH);\n ctx.stroke(pathV);\n }\n if (localOptions.drawGaze && f.rotation?.gaze?.strength && f.rotation?.gaze?.bearing && f.annotations['leftEyeIris'] && f.annotations['rightEyeIris'] && f.annotations['leftEyeIris'][0] && f.annotations['rightEyeIris'][0]) {\n ctx.strokeStyle = 'pink';\n ctx.fillStyle = 'pink';\n const leftGaze = [\n f.annotations['leftEyeIris'][0][0] + (Math.sin(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[3]),\n f.annotations['leftEyeIris'][0][1] + (Math.cos(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[2]),\n ];\n arrow(ctx, [f.annotations['leftEyeIris'][0][0], f.annotations['leftEyeIris'][0][1]], [leftGaze[0], leftGaze[1]], 4);\n const rightGaze = [\n f.annotations['rightEyeIris'][0][0] + (Math.sin(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[3]),\n f.annotations['rightEyeIris'][0][1] + (Math.cos(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[2]),\n ];\n arrow(ctx, [f.annotations['rightEyeIris'][0][0], f.annotations['rightEyeIris'][0][1]], [rightGaze[0], rightGaze[1]], 4);\n }\n }\n }\n }\n}\n\n/** draw detected bodies */\nexport async function body(inCanvas: AnyCanvas, result: Array, drawOptions?: Partial) {\n const localOptions = mergeDeep(options, drawOptions);\n if (!result || !inCanvas) return;\n const ctx = getCanvasContext(inCanvas);\n ctx.lineJoin = 'round';\n for (let i = 0; i < result.length; i++) {\n ctx.strokeStyle = localOptions.color;\n ctx.fillStyle = localOptions.color;\n ctx.lineWidth = localOptions.lineWidth;\n ctx.font = localOptions.font;\n if (localOptions.drawBoxes && result[i].box && result[i].box?.length === 4) {\n rect(ctx, result[i].box[0], result[i].box[1], result[i].box[2], result[i].box[3], localOptions);\n if (localOptions.drawLabels) {\n if (localOptions.shadowColor && localOptions.shadowColor !== '') {\n ctx.fillStyle = localOptions.shadowColor;\n ctx.fillText(`body ${100 * result[i].score}%`, result[i].box[0] + 3, 1 + result[i].box[1] + localOptions.lineHeight, result[i].box[2]);\n }\n ctx.fillStyle = localOptions.labelColor;\n ctx.fillText(`body ${100 * result[i].score}%`, 
result[i].box[0] + 2, 0 + result[i].box[1] + localOptions.lineHeight, result[i].box[2]);\n }\n }\n if (localOptions.drawPoints && result[i].keypoints) {\n for (let pt = 0; pt < result[i].keypoints.length; pt++) {\n ctx.fillStyle = localOptions.useDepth && result[i].keypoints[pt].position[2] ? `rgba(${127.5 + (2 * (result[i].keypoints[pt].position[2] || 0))}, ${127.5 - (2 * (result[i].keypoints[pt].position[2] || 0))}, 255, 0.5)` : localOptions.color;\n point(ctx, result[i].keypoints[pt].position[0], result[i].keypoints[pt].position[1], 0, localOptions);\n }\n }\n if (localOptions.drawLabels && result[i].keypoints) {\n ctx.font = localOptions.font;\n for (const pt of result[i].keypoints) {\n ctx.fillStyle = localOptions.useDepth && pt.position[2] ? `rgba(${127.5 + (2 * pt.position[2])}, ${127.5 - (2 * pt.position[2])}, 255, 0.5)` : localOptions.color;\n ctx.fillText(`${pt.part} ${Math.trunc(100 * pt.score)}%`, pt.position[0] + 4, pt.position[1] + 4);\n }\n }\n if (localOptions.drawPolygons && result[i].keypoints && result[i].annotations) {\n for (const part of Object.values(result[i].annotations)) {\n for (const connected of part) curves(ctx, connected, localOptions);\n }\n }\n }\n}\n\n/** draw detected hands */\nexport async function hand(inCanvas: AnyCanvas, result: Array, drawOptions?: Partial) {\n const localOptions = mergeDeep(options, drawOptions);\n if (!result || !inCanvas) return;\n const ctx = getCanvasContext(inCanvas);\n ctx.lineJoin = 'round';\n ctx.font = localOptions.font;\n for (const h of result) {\n if (localOptions.drawBoxes) {\n ctx.strokeStyle = localOptions.color;\n ctx.fillStyle = localOptions.color;\n rect(ctx, h.box[0], h.box[1], h.box[2], h.box[3], localOptions);\n if (localOptions.drawLabels) {\n if (localOptions.shadowColor && localOptions.shadowColor !== '') {\n ctx.fillStyle = localOptions.shadowColor;\n ctx.fillText(`hand:${Math.trunc(100 * h.score)}%`, h.box[0] + 3, 1 + h.box[1] + localOptions.lineHeight, h.box[2]); // can use h.label\n }\n ctx.fillStyle = localOptions.labelColor;\n ctx.fillText(`hand:${Math.trunc(100 * h.score)}%`, h.box[0] + 2, 0 + h.box[1] + localOptions.lineHeight, h.box[2]); // can use h.label\n }\n ctx.stroke();\n }\n if (localOptions.drawPoints) {\n if (h.keypoints && h.keypoints.length > 0) {\n for (const pt of h.keypoints) {\n ctx.fillStyle = localOptions.useDepth ? `rgba(${127.5 + (2 * (pt[2] || 0))}, ${127.5 - (2 * (pt[2] || 0))}, 255, 0.5)` : localOptions.color;\n point(ctx, pt[0], pt[1], 0, localOptions);\n }\n }\n }\n if (localOptions.drawLabels && h.annotations) {\n const addHandLabel = (part, title) => {\n if (!part || part.length === 0 || !part[0]) return;\n ctx.fillStyle = localOptions.useDepth ? `rgba(${127.5 + (2 * part[part.length - 1][2])}, ${127.5 - (2 * part[part.length - 1][2])}, 255, 0.5)` : localOptions.color;\n ctx.fillText(title, part[part.length - 1][0] + 4, part[part.length - 1][1] + 4);\n };\n ctx.font = localOptions.font;\n addHandLabel(h.annotations['index'], 'index');\n addHandLabel(h.annotations['middle'], 'middle');\n addHandLabel(h.annotations['ring'], 'ring');\n addHandLabel(h.annotations['pinky'], 'pinky');\n addHandLabel(h.annotations['thumb'], 'thumb');\n addHandLabel(h.annotations['palm'], 'palm');\n }\n if (localOptions.drawPolygons && h.annotations) {\n const addHandLine = (part) => {\n if (!part || part.length === 0 || !part[0]) return;\n for (let i = 0; i < part.length; i++) {\n ctx.beginPath();\n ctx.strokeStyle = localOptions.useDepth ? 
`rgba(${127.5 + (i * part[i][2])}, ${127.5 - (i * part[i][2])}, 255, 0.5)` : localOptions.color;\n ctx.moveTo(part[i > 0 ? i - 1 : 0][0], part[i > 0 ? i - 1 : 0][1]);\n ctx.lineTo(part[i][0], part[i][1]);\n ctx.stroke();\n }\n };\n ctx.lineWidth = localOptions.lineWidth;\n addHandLine(h.annotations['index']);\n addHandLine(h.annotations['middle']);\n addHandLine(h.annotations['ring']);\n addHandLine(h.annotations['pinky']);\n addHandLine(h.annotations['thumb']);\n // addPart(h.annotations.palm);\n }\n }\n}\n\n/** draw detected objects */\nexport async function object(inCanvas: AnyCanvas, result: Array, drawOptions?: Partial) {\n const localOptions = mergeDeep(options, drawOptions);\n if (!result || !inCanvas) return;\n const ctx = getCanvasContext(inCanvas);\n ctx.lineJoin = 'round';\n ctx.font = localOptions.font;\n for (const h of result) {\n if (localOptions.drawBoxes) {\n ctx.strokeStyle = localOptions.color;\n ctx.fillStyle = localOptions.color;\n rect(ctx, h.box[0], h.box[1], h.box[2], h.box[3], localOptions);\n if (localOptions.drawLabels) {\n const label = `${h.label} ${Math.round(100 * h.score)}%`;\n if (localOptions.shadowColor && localOptions.shadowColor !== '') {\n ctx.fillStyle = localOptions.shadowColor;\n ctx.fillText(label, h.box[0] + 3, 1 + h.box[1] + localOptions.lineHeight, h.box[2]);\n }\n ctx.fillStyle = localOptions.labelColor;\n ctx.fillText(label, h.box[0] + 2, 0 + h.box[1] + localOptions.lineHeight, h.box[2]);\n }\n ctx.stroke();\n }\n }\n}\n\n/** draw combined person results instead of individual detection result objects */\nexport async function person(inCanvas: AnyCanvas, result: Array, drawOptions?: Partial) {\n const localOptions = mergeDeep(options, drawOptions);\n if (!result || !inCanvas) return;\n const ctx = getCanvasContext(inCanvas);\n ctx.lineJoin = 'round';\n ctx.font = localOptions.font;\n\n for (let i = 0; i < result.length; i++) {\n if (localOptions.drawBoxes) {\n ctx.strokeStyle = localOptions.color;\n ctx.fillStyle = localOptions.color;\n rect(ctx, result[i].box[0], result[i].box[1], result[i].box[2], result[i].box[3], localOptions);\n if (localOptions.drawLabels) {\n const label = `person #${i}`;\n if (localOptions.shadowColor && localOptions.shadowColor !== '') {\n ctx.fillStyle = localOptions.shadowColor;\n ctx.fillText(label, result[i].box[0] + 3, 1 + result[i].box[1] + localOptions.lineHeight, result[i].box[2]);\n }\n ctx.fillStyle = localOptions.labelColor;\n ctx.fillText(label, result[i].box[0] + 2, 0 + result[i].box[1] + localOptions.lineHeight, result[i].box[2]);\n }\n ctx.stroke();\n }\n }\n}\n\n/** draw processed canvas */\nexport async function canvas(input: AnyCanvas | HTMLImageElement | HTMLMediaElement | HTMLVideoElement, output: HTMLCanvasElement) {\n if (!input || !output) return;\n const ctx = getCanvasContext(output);\n ctx.drawImage(input, 0, 0);\n}\n\n/** meta-function that performs draw for: canvas, face, body, hand\n * @returns {Promise}\n*/\nexport async function all(inCanvas: AnyCanvas, result: Result, drawOptions?: Partial) {\n if (!result || !result.performance || !result || !inCanvas) return null;\n const timeStamp = now();\n const localOptions = mergeDeep(options, drawOptions);\n const promise = Promise.all([\n face(inCanvas, result.face, localOptions),\n body(inCanvas, result.body, localOptions),\n hand(inCanvas, result.hand, localOptions),\n object(inCanvas, result.object, localOptions),\n gesture(inCanvas, result.gesture, localOptions), // gestures do not have buffering\n // person(inCanvas, result.persons, 
localOptions); // already included above\n ]);\n drawTime = env.perfadd ? drawTime + Math.round(now() - timeStamp) : Math.round(now() - timeStamp);\n result.performance.draw = drawTime;\n return promise;\n}\n",
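// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original source) for the draw
// module above; assumes a browser host page with a <video> and a <canvas>
// element, and the published package entry point.
import Human from '@vladmandic/human'; // assumed package name

const human = new Human({ modelBasePath: '../models/' }); // assumed config
const video = document.getElementById('video') as HTMLVideoElement;
const overlay = document.getElementById('overlay') as HTMLCanvasElement;

async function render() {
  const result = await human.detect(video); // run all enabled detectors
  await human.draw.canvas(video, overlay); // copy current frame to the overlay
  // per-call options are merged over human.draw.options via mergeDeep(),
  // so only the fields being overridden need to be supplied
  await human.draw.all(overlay, result, { drawPoints: true, lineWidth: 2 });
  requestAnimationFrame(render); // simple render loop for the sketch
}
render();
// ---------------------------------------------------------------------------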
"// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars\nconst rad2deg = (theta) => Math.round((theta * 180) / Math.PI);\n\nconst calculateGaze = (face): { bearing: number, strength: number } => {\n const radians = (pt1, pt2) => Math.atan2(pt1[1] - pt2[1], pt1[0] - pt2[0]); // function to calculate angle between any two points\n if (!face.annotations['rightEyeIris'] || !face.annotations['leftEyeIris']) return { bearing: 0, strength: 0 };\n\n const offsetIris = [0, -0.1]; // iris center may not align with average of eye extremes\n const eyeRatio = 1; // factor to normalize changes x vs y\n\n const left = face.mesh[33][2] > face.mesh[263][2]; // pick left or right eye depending on which one is closer based on outside point z axis\n const irisCenter = left ? face.mesh[473] : face.mesh[468];\n const eyeCenter = left // eye center is average of extreme points on x axis for both x and y, ignoring y extreme points as eyelids naturally open/close more when gazing up/down so relative point is less precise\n ? [(face.mesh[133][0] + face.mesh[33][0]) / 2, (face.mesh[133][1] + face.mesh[33][1]) / 2]\n : [(face.mesh[263][0] + face.mesh[362][0]) / 2, (face.mesh[263][1] + face.mesh[362][1]) / 2];\n const eyeSize = left // eye size is difference between extreme points for both x and y, used to normalize & squarify eye dimensions\n ? [face.mesh[133][0] - face.mesh[33][0], face.mesh[23][1] - face.mesh[27][1]]\n : [face.mesh[263][0] - face.mesh[362][0], face.mesh[253][1] - face.mesh[257][1]];\n\n const eyeDiff = [ // x distance between extreme point and center point normalized with eye size\n (eyeCenter[0] - irisCenter[0]) / eyeSize[0] - offsetIris[0],\n eyeRatio * (irisCenter[1] - eyeCenter[1]) / eyeSize[1] - offsetIris[1],\n ];\n let strength = Math.sqrt((eyeDiff[0] ** 2) + (eyeDiff[1] ** 2)); // vector length is a diagonal between two differences\n strength = Math.min(strength, face.boxRaw[2] / 2, face.boxRaw[3] / 2); // limit strength to half of box size to avoid clipping due to low precision\n const bearing = (radians([0, 0], eyeDiff) + (Math.PI / 2)) % Math.PI; // using eyeDiff instead of eyeCenter/irisCenter combo due to manual adjustments and rotated clockwise 90 degrees\n\n return { bearing, strength };\n};\n\nexport const calculateFaceAngle = (face, imageSize): {\n angle: { pitch: number, yaw: number, roll: number },\n matrix: [number, number, number, number, number, number, number, number, number],\n gaze: { bearing: number, strength: number },\n} => {\n // const degrees = (theta) => Math.abs(((theta * 180) / Math.PI) % 360);\n const normalize = (v) => { // normalize vector\n const length = Math.sqrt(v[0] * v[0] + v[1] * v[1] + v[2] * v[2]);\n v[0] /= length;\n v[1] /= length;\n v[2] /= length;\n return v;\n };\n const subVectors = (a, b) => { // vector subtraction (a - b)\n const x = a[0] - b[0];\n const y = a[1] - b[1];\n const z = a[2] - b[2];\n return [x, y, z];\n };\n const crossVectors = (a, b) => { // vector cross product (a x b)\n const x = a[1] * b[2] - a[2] * b[1];\n const y = a[2] * b[0] - a[0] * b[2];\n const z = a[0] * b[1] - a[1] * b[0];\n return [x, y, z];\n };\n // 3x3 rotation matrix to Euler angles based on https://www.geometrictools.com/Documentation/EulerAngles.pdf\n const rotationMatrixToEulerAngle = (r) => {\n // eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars\n const [r00, r01, r02, r10, r11, r12, r20, r21, r22] = r;\n let thetaX: number;\n let thetaY: number;\n let thetaZ: number;\n if (r10 < 1) { // YZX calculation\n if (r10 > -1) {\n thetaZ = Math.asin(r10);\n thetaY = Math.atan2(-r20, r00);\n thetaX = Math.atan2(-r12, r11);\n } else {\n thetaZ = -Math.PI / 2;\n thetaY = -Math.atan2(r21, r22);\n thetaX = 0;\n }\n } else {\n thetaZ = Math.PI / 2;\n thetaY = Math.atan2(r21, r22);\n thetaX = 0;\n }\n if (isNaN(thetaX)) thetaX = 0;\n if (isNaN(thetaY)) thetaY = 0;\n if (isNaN(thetaZ)) thetaZ = 0;\n return { pitch: 2 * -thetaX, yaw: 2 * -thetaY, roll: 2 * -thetaZ };\n };\n // simple Euler angle calculation based on existing 3D mesh\n // eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars\n const meshToEulerAngle = (mesh) => {\n const radians = (a1, a2, b1, b2) => Math.atan2(b2 - a2, b1 - a1);\n // eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars\n const angle = {\n // values are in radians in range of -pi/2 to pi/2 which is -90 to +90 degrees, value of 0 means center\n // pitch is face move up/down\n pitch: radians(mesh[10][1], mesh[10][2], mesh[152][1], mesh[152][2]), // looking at y,z of top and bottom points of the face\n // yaw is face turn left/right\n yaw: radians(mesh[33][0], mesh[33][2], mesh[263][0], mesh[263][2]), // looking at x,z of outside corners of leftEye and rightEye\n // roll is face lean left/right\n roll: radians(mesh[33][0], mesh[33][1], mesh[263][0], mesh[263][1]), // looking at x,y of outside corners of leftEye and rightEye\n };\n return angle;\n };\n\n // initialize gaze and mesh\n const mesh = face.meshRaw;\n if (!mesh || mesh.length < 300) return { angle: { pitch: 0, yaw: 0, roll: 0 }, matrix: [1, 0, 0, 0, 1, 0, 0, 0, 1], gaze: { bearing: 0, strength: 0 } };\n\n const size = Math.max(face.boxRaw[2] * imageSize[0], face.boxRaw[3] * imageSize[1]) / 1.5;\n // top, bottom, left, right\n const pts = [mesh[10], mesh[152], mesh[234], mesh[454]].map((pt) => [\n // make the xyz coordinates proportional, independent of the image/box size\n pt[0] * imageSize[0] / size,\n pt[1] * imageSize[1] / size,\n pt[2],\n ]);\n\n const y_axis = normalize(subVectors(pts[1], pts[0]));\n let x_axis = normalize(subVectors(pts[3], pts[2]));\n const z_axis = normalize(crossVectors(x_axis, y_axis));\n // adjust x_axis to make sure that all axes are perpendicular to each other\n x_axis = crossVectors(y_axis, z_axis);\n\n // Rotation Matrix from Axis Vectors - http://renderdan.blogspot.com/2006/05/rotation-matrix-from-axis-vectors.html\n // 3x3 rotation matrix is flattened to array in row-major order. Note that the rotation represented by this matrix is inverted.\n const matrix: [number, number, number, number, number, number, number, number, number] = [\n x_axis[0], x_axis[1], x_axis[2],\n y_axis[0], y_axis[1], y_axis[2],\n z_axis[0], z_axis[1], z_axis[2],\n ];\n const angle = rotationMatrixToEulerAngle(matrix);\n // const angle = meshToEulerAngle(mesh);\n\n // we have iris keypoints so we can calculate gaze direction\n const gaze = mesh.length === 478 ? 
calculateGaze(face) : { bearing: 0, strength: 0 };\n\n return { angle, matrix, gaze };\n};\n",
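// ---------------------------------------------------------------------------
// Worked example (not part of the original source) for the YZX Euler
// extraction used by calculateFaceAngle() above. For a pure roll rotation by
// angle a around the z axis, the row-major matrix is
//   [cos a, -sin a, 0, sin a, cos a, 0, 0, 0, 1]
// so r10 = sin(a), thetaZ = asin(r10) recovers a, and the module reports
// roll = 2 * -thetaZ. The helper below mirrors only the simple in-range
// branch of rotationMatrixToEulerAngle(); it is a simplified copy for
// illustration, not the module's export.
const rollFromMatrix = (r: number[]): number => 2 * -Math.asin(r[3]); // r[3] is r10

const a = Math.PI / 12; // 15 degrees
const m = [Math.cos(a), -Math.sin(a), 0, Math.sin(a), Math.cos(a), 0, 0, 0, 1];
console.log(rollFromMatrix(m), 2 * -a); // both print ~ -0.5236 (~ -30 degrees)
// ---------------------------------------------------------------------------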
"/**\n * Face algorithm implementation\n * Uses FaceMesh, Emotion and FaceRes models to create a unified pipeline\n */\n\nimport { log, now } from '../util/util';\nimport { env } from '../util/env';\nimport * as tf from '../../dist/tfjs.esm.js';\nimport * as facemesh from './facemesh';\nimport * as emotion from '../gear/emotion';\nimport * as faceres from './faceres';\nimport * as antispoof from './antispoof';\nimport type { FaceResult } from '../result';\nimport type { Tensor } from '../tfjs/types';\nimport { calculateFaceAngle } from './angles';\n\nexport const detectFace = async (parent /* instance of human */, input: Tensor): Promise<FaceResult[]> => {\n // run facemesh, includes blazeface and iris\n // eslint-disable-next-line no-async-promise-executor\n let timeStamp;\n let ageRes;\n let gearRes;\n let genderRes;\n let emotionRes;\n let embeddingRes;\n let antispoofRes;\n let descRes;\n const faceRes: Array<FaceResult> = [];\n parent.state = 'run:face';\n timeStamp = now();\n\n const faces = await facemesh.predict(input, parent.config);\n parent.performance.face = env.perfadd ? (parent.performance.face || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);\n if (!input.shape || input.shape.length !== 4) return [];\n if (!faces) return [];\n // for (const face of faces) {\n for (let i = 0; i < faces.length; i++) {\n parent.analyze('Get Face');\n\n // if something went wrong, skip the face\n // @ts-ignore possibly undefined\n if (!faces[i].tensor || faces[i].tensor['isDisposedInternal']) {\n log('Face object is disposed:', faces[i].tensor);\n continue;\n }\n\n const rotation = calculateFaceAngle(faces[i], [input.shape[2], input.shape[1]]);\n\n // run emotion, inherits face from blazeface\n parent.analyze('Start Emotion:');\n if (parent.config.async) {\n emotionRes = parent.config.face.emotion.enabled ? emotion.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : null;\n } else {\n parent.state = 'run:emotion';\n timeStamp = now();\n emotionRes = parent.config.face.emotion.enabled ? await emotion.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : null;\n parent.performance.emotion = env.perfadd ? (parent.performance.emotion || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);\n }\n parent.analyze('End Emotion:');\n\n // run antispoof, inherits face from blazeface\n parent.analyze('Start AntiSpoof:');\n if (parent.config.async) {\n antispoofRes = parent.config.face.antispoof.enabled ? antispoof.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : null;\n } else {\n parent.state = 'run:antispoof';\n timeStamp = now();\n antispoofRes = parent.config.face.antispoof.enabled ? await antispoof.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : null;\n parent.performance.antispoof = env.perfadd ? (parent.performance.antispoof || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);\n }\n parent.analyze('End AntiSpoof:');\n\n // run gear, inherits face from blazeface\n /*\n parent.analyze('Start GEAR:');\n if (parent.config.async) {\n gearRes = parent.config.face.agegenderrace.enabled ? agegenderrace.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};\n } else {\n parent.state = 'run:gear';\n timeStamp = now();\n gearRes = parent.config.face.agegenderrace.enabled ? await agegenderrace.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};\n parent.performance.emotion = Math.trunc(now() - timeStamp);\n }\n parent.analyze('End GEAR:');\n */\n\n // run description, inherits face from blazeface\n parent.analyze('Start Description:');\n if (parent.config.async) {\n descRes = parent.config.face.description.enabled ? faceres.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : null;\n } else {\n parent.state = 'run:description';\n timeStamp = now();\n descRes = parent.config.face.description.enabled ? await faceres.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : null;\n parent.performance.description = env.perfadd ? (parent.performance.description || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);\n }\n parent.analyze('End Description:');\n\n // if async wait for results\n if (parent.config.async) {\n [ageRes, genderRes, emotionRes, embeddingRes, descRes, gearRes, antispoofRes] = await Promise.all([ageRes, genderRes, emotionRes, embeddingRes, descRes, gearRes, antispoofRes]);\n }\n\n parent.analyze('Finish Face:');\n\n // calculate iris distance\n // iris: array[ center, left, top, right, bottom]\n if (!parent.config.face.iris.enabled && faces[i]?.annotations?.leftEyeIris && faces[i]?.annotations?.rightEyeIris) {\n delete faces[i].annotations.leftEyeIris;\n delete faces[i].annotations.rightEyeIris;\n }\n const irisSize = (faces[i].annotations && faces[i].annotations.leftEyeIris && faces[i].annotations.leftEyeIris[0] && faces[i].annotations.rightEyeIris && faces[i].annotations.rightEyeIris[0]\n && (faces[i].annotations.leftEyeIris.length > 0) && (faces[i].annotations.rightEyeIris.length > 0)\n && (faces[i].annotations.leftEyeIris[0] !== null) && (faces[i].annotations.rightEyeIris[0] !== null))\n ? Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1])) / input.shape[2]\n : 0; // note: average human iris size is 11.7mm\n\n // optionally return tensor\n const tensor = parent.config.face.detector.return ? tf.squeeze(faces[i].tensor) : null;\n // dispose original face tensor\n tf.dispose(faces[i].tensor);\n // delete temp face image\n if (faces[i].tensor) delete faces[i].tensor;\n // combine results\n faceRes.push({\n ...faces[i],\n id: i,\n age: descRes?.age,\n gender: descRes?.gender,\n genderScore: descRes?.genderScore,\n embedding: descRes?.descriptor,\n emotion: emotionRes,\n real: antispoofRes,\n iris: irisSize !== 0 ? 
Math.trunc(500 / irisSize / 11.7) / 100 : 0,\n rotation,\n tensor,\n });\n parent.analyze('End Face');\n }\n parent.analyze('End FaceMesh:');\n if (parent.config.async) {\n if (parent.performance.face) delete parent.performance.face;\n if (parent.performance.age) delete parent.performance.age;\n if (parent.performance.gender) delete parent.performance.gender;\n if (parent.performance.emotion) delete parent.performance.emotion;\n }\n return faceRes;\n};\n", "/**\n * Gesture detection algorithm\n */\n\nimport type { GestureResult } from '../result';\nimport * as fingerPose from '../hand/fingerpose';\n\n/**\n * @typedef FaceGesture\n */\nexport type FaceGesture =\n `facing ${'left' | 'center' | 'right'}`\n | `blink ${'left' | 'right'} eye`\n | `mouth ${number}% open`\n | `head ${'up' | 'down'}`;\n\n/**\n * @typedef IrisGesture\n */\nexport type IrisGesture =\n 'facing center'\n | `looking ${'left' | 'right' | 'up' | 'down'}`\n | 'looking center';\n\n/**\n * @typedef BodyGesture\n */\nexport type BodyGesture =\n `leaning ${'left' | 'right'}`\n | `raise ${'left' | 'right'} hand`\n | 'i give up';\n\n/**\n * @typedef BodyGesture\n */\nexport type HandGesture =\n `${'thumb' | 'index' | 'middle' | 'ring' | 'pinky'} forward`\n | `${'thumb' | 'index' | 'middle' | 'ring' | 'pinky'} up`\n | 'victory'\n | 'thumbs up';\n\nexport const body = (res): GestureResult[] => {\n if (!res) return [];\n const gestures: Array<{ body: number, gesture: BodyGesture }> = [];\n for (let i = 0; i < res.length; i++) {\n // raising hands\n const leftWrist = res[i].keypoints.find((a) => (a.part === 'leftWrist'));\n const rightWrist = res[i].keypoints.find((a) => (a.part === 'rightWrist'));\n const nose = res[i].keypoints.find((a) => (a.part === 'nose'));\n if (nose && leftWrist && rightWrist && (leftWrist.position[1] < nose.position[1]) && (rightWrist.position[1] < nose.position[1])) gestures.push({ body: i, gesture: 'i give up' });\n else if (nose && leftWrist && (leftWrist.position[1] < nose.position[1])) gestures.push({ body: i, gesture: 'raise left hand' });\n else if (nose && rightWrist && (rightWrist.position[1] < nose.position[1])) gestures.push({ body: i, gesture: 'raise right hand' });\n\n // leaning\n const leftShoulder = res[i].keypoints.find((a) => (a.part === 'leftShoulder'));\n const rightShoulder = res[i].keypoints.find((a) => (a.part === 'rightShoulder'));\n if (leftShoulder && rightShoulder) gestures.push({ body: i, gesture: `leaning ${(leftShoulder.position[1] > rightShoulder.position[1]) ? 'left' : 'right'}` });\n }\n return gestures;\n};\n\nexport const face = (res): GestureResult[] => {\n if (!res) return [];\n const gestures: Array<{ face: number, gesture: FaceGesture }> = [];\n for (let i = 0; i < res.length; i++) {\n if (res[i].mesh && res[i].mesh.length > 450) {\n const eyeFacing = res[i].mesh[33][2] - res[i].mesh[263][2];\n if (Math.abs(eyeFacing) < 10) gestures.push({ face: i, gesture: 'facing center' });\n else gestures.push({ face: i, gesture: `facing ${eyeFacing < 0 ? 
'left' : 'right'}` });\n const openLeft = Math.abs(res[i].mesh[374][1] - res[i].mesh[386][1]) / Math.abs(res[i].mesh[443][1] - res[i].mesh[450][1]); // center of eye inner lid y coord div center of wider eye border y coord\n if (openLeft < 0.2) gestures.push({ face: i, gesture: 'blink left eye' });\n const openRight = Math.abs(res[i].mesh[145][1] - res[i].mesh[159][1]) / Math.abs(res[i].mesh[223][1] - res[i].mesh[230][1]); // center of eye inner lid y coord div center of wider eye border y coord\n if (openRight < 0.2) gestures.push({ face: i, gesture: 'blink right eye' });\n const mouthOpen = Math.min(100, 500 * Math.abs(res[i].mesh[13][1] - res[i].mesh[14][1]) / Math.abs(res[i].mesh[10][1] - res[i].mesh[152][1]));\n if (mouthOpen > 10) gestures.push({ face: i, gesture: `mouth ${Math.trunc(mouthOpen)}% open` });\n const chinDepth = res[i].mesh[152][2];\n if (Math.abs(chinDepth) > 10) gestures.push({ face: i, gesture: `head ${chinDepth < 0 ? 'up' : 'down'}` });\n }\n }\n return gestures;\n};\n\nexport const iris = (res): GestureResult[] => {\n if (!res) return [];\n const gestures: Array<{ iris: number, gesture: IrisGesture }> = [];\n for (let i = 0; i < res.length; i++) {\n if (!res[i].annotations || !res[i].annotations.leftEyeIris || !res[i].annotations.leftEyeIris[0] || !res[i].annotations.rightEyeIris || !res[i].annotations.rightEyeIris[0]) continue;\n const sizeXLeft = res[i].annotations.leftEyeIris[3][0] - res[i].annotations.leftEyeIris[1][0];\n const sizeYLeft = res[i].annotations.leftEyeIris[4][1] - res[i].annotations.leftEyeIris[2][1];\n const areaLeft = Math.abs(sizeXLeft * sizeYLeft);\n\n const sizeXRight = res[i].annotations.rightEyeIris[3][0] - res[i].annotations.rightEyeIris[1][0];\n const sizeYRight = res[i].annotations.rightEyeIris[4][1] - res[i].annotations.rightEyeIris[2][1];\n const areaRight = Math.abs(sizeXRight * sizeYRight);\n\n let center = false;\n const difference = Math.abs(areaLeft - areaRight) / Math.max(areaLeft, areaRight);\n if (difference < 0.25) {\n center = true;\n gestures.push({ iris: i, gesture: 'facing center' });\n }\n\n const leftIrisCenterX = Math.abs(res[i].mesh[263][0] - res[i].annotations.leftEyeIris[0][0]) / res[i].box[2];\n const rightIrisCenterX = Math.abs(res[i].mesh[33][0] - res[i].annotations.rightEyeIris[0][0]) / res[i].box[2];\n if (leftIrisCenterX > 0.06 || rightIrisCenterX > 0.06) center = false;\n if (leftIrisCenterX > rightIrisCenterX) { // check eye with bigger offset\n if (leftIrisCenterX > 0.05) gestures.push({ iris: i, gesture: 'looking right' });\n } else {\n if (rightIrisCenterX > 0.05) gestures.push({ iris: i, gesture: 'looking left' });\n }\n\n const rightIrisCenterY = Math.abs(res[i].mesh[145][1] - res[i].annotations.rightEyeIris[0][1]) / res[i].box[3];\n const leftIrisCenterY = Math.abs(res[i].mesh[374][1] - res[i].annotations.leftEyeIris[0][1]) / res[i].box[3];\n if (leftIrisCenterY < 0.01 || rightIrisCenterY < 0.01 || leftIrisCenterY > 0.022 || rightIrisCenterY > 0.022) center = false;\n if (leftIrisCenterY < 0.01 || rightIrisCenterY < 0.01) gestures.push({ iris: i, gesture: 'looking down' });\n if (leftIrisCenterY > 0.022 || rightIrisCenterY > 0.022) gestures.push({ iris: i, gesture: 'looking up' });\n\n // still center;\n if (center) gestures.push({ iris: i, gesture: 'looking center' });\n }\n return gestures;\n};\n\nexport const hand = (res): GestureResult[] => {\n if (!res) return [];\n const gestures: Array<{ hand: number, gesture: HandGesture }> = [];\n for (let i = 0; i < res.length; i++) {\n const fingers: Array<{ 
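/* ---------------------------------------------------------------------------
   Worked example (not part of the original source) for the mouth-open formula
   in face() above: openness = min(100, 500 * |mesh[13].y - mesh[14].y| /
   |mesh[10].y - mesh[152].y|), i.e. the inner-lip gap scaled by the overall
   face height. With an assumed face height of 200px and a lip gap of 8px:
   500 * 8 / 200 = 20, so the emitted gesture is "mouth 20% open"; values at
   or below the 10% threshold produce no gesture.
--------------------------------------------------------------------------- */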
name: string, position: number }> = [];\n if (res[i]['annotations']) {\n for (const [finger, pos] of Object.entries(res[i]['annotations'])) {\n if (finger !== 'palmBase' && Array.isArray(pos) && pos[0]) fingers.push({ name: finger.toLowerCase(), position: pos[0] }); // get tip of each finger\n }\n }\n if (fingers && fingers.length > 0) {\n const closest = fingers.reduce((best, a) => (best.position[2] < a.position[2] ? best : a));\n gestures.push({ hand: i, gesture: `${closest.name} forward` as HandGesture });\n const highest = fingers.reduce((best, a) => (best.position[1] < a.position[1] ? best : a));\n gestures.push({ hand: i, gesture: `${highest.name} up` as HandGesture });\n }\n if (res[i]['keypoints']) {\n const poses = fingerPose.match(res[i]['keypoints']);\n for (const pose of poses) gestures.push({ hand: i, gesture: pose.name as HandGesture });\n }\n }\n return gestures;\n};\n", "/**\n * Results interpolation for smoothing of video detection results in between detected frames\n */\n\nimport type { Result, FaceResult, BodyResult, HandResult, ObjectResult, GestureResult, PersonResult, Box, Point } from '../result';\nimport type { Config } from '../config';\n\nimport * as moveNetCoords from '../body/movenetcoords';\nimport * as blazePoseCoords from '../body/blazeposecoords';\nimport * as efficientPoseCoords from '../body/efficientposecoords';\nimport { now } from './util';\nimport { env } from './env';\n\nconst bufferedResult: Result = { face: [], body: [], hand: [], gesture: [], object: [], persons: [], performance: {}, timestamp: 0 };\nlet interpolateTime = 0;\n\nexport function calc(newResult: Result, config: Config): Result {\n const t0 = now();\n if (!newResult) return { face: [], body: [], hand: [], gesture: [], object: [], persons: [], performance: {}, timestamp: 0 };\n // each record is only updated using deep clone when number of detected records changes, otherwise it will converge by itself\n // otherwise bufferedResult is a shallow clone of result plus updated local calculated values\n // thus mixing by-reference and by-value assignments to minimize memory operations\n\n const elapsed = Date.now() - newResult.timestamp;\n // curve fitted: buffer = 8 - ln(delay)\n // interpolation formula: current = ((buffer - 1) * previous + live) / buffer\n // - at 50ms delay buffer = ~4.1 => 28% towards live data\n // - at 250ms delay buffer = ~2.5 => 40% towards live data\n // - at 500ms delay buffer = ~1.8 => 55% towards live data\n // - at 750ms delay buffer = ~1.4 => 71% towards live data\n // - at 1sec delay buffer = 1 which means live data is used\n const bufferedFactor = elapsed < 1000 ? 8 - Math.log(elapsed + 1) : 1;\n\n bufferedResult.canvas = newResult.canvas;\n\n // interpolate body results\n if (!bufferedResult.body || (newResult.body.length !== bufferedResult.body.length)) {\n bufferedResult.body = JSON.parse(JSON.stringify(newResult.body as BodyResult[])); // deep clone once\n } else {\n for (let i = 0; i < newResult.body.length; i++) {\n const box = newResult.body[i].box // update box\n .map((b, j) => ((bufferedFactor - 1) * bufferedResult.body[i].box[j] + b) / bufferedFactor) as Box;\n const boxRaw = newResult.body[i].boxRaw // update boxRaw\n .map((b, j) => ((bufferedFactor - 1) * bufferedResult.body[i].boxRaw[j] + b) / bufferedFactor) as Box;\n const keypoints = (newResult.body[i].keypoints // update keypoints\n .map((keypoint, j) => ({\n score: keypoint.score,\n part: keypoint.part,\n position: [\n bufferedResult.body[i].keypoints[j] ? 
((bufferedFactor - 1) * bufferedResult.body[i].keypoints[j].position[0] + keypoint.position[0]) / bufferedFactor : keypoint.position[0],\n bufferedResult.body[i].keypoints[j] ? ((bufferedFactor - 1) * bufferedResult.body[i].keypoints[j].position[1] + keypoint.position[1]) / bufferedFactor : keypoint.position[1],\n ],\n positionRaw: [\n bufferedResult.body[i].keypoints[j] ? ((bufferedFactor - 1) * bufferedResult.body[i].keypoints[j].positionRaw[0] + keypoint.positionRaw[0]) / bufferedFactor : keypoint.position[0],\n bufferedResult.body[i].keypoints[j] ? ((bufferedFactor - 1) * bufferedResult.body[i].keypoints[j].positionRaw[1] + keypoint.positionRaw[1]) / bufferedFactor : keypoint.position[1],\n ],\n }))) as Array<{ score: number, part: string, position: [number, number, number?], positionRaw: [number, number, number?] }>;\n const annotations: Record = {};\n\n let coords = { connected: {} };\n if (config.body?.modelPath?.includes('efficientpose')) coords = efficientPoseCoords;\n else if (config.body?.modelPath?.includes('blazepose')) coords = blazePoseCoords;\n else if (config.body?.modelPath?.includes('movenet')) coords = moveNetCoords;\n for (const [name, indexes] of Object.entries(coords.connected as Record)) {\n const pt: Array = [];\n for (let j = 0; j < indexes.length - 1; j++) {\n const pt0 = keypoints.find((kp) => kp.part === indexes[j]);\n const pt1 = keypoints.find((kp) => kp.part === indexes[j + 1]);\n if (pt0 && pt1 && pt0.score > (config.body.minConfidence || 0) && pt1.score > (config.body.minConfidence || 0)) pt.push([pt0.position, pt1.position]);\n }\n annotations[name] = pt;\n }\n bufferedResult.body[i] = { ...newResult.body[i], box, boxRaw, keypoints, annotations: annotations as BodyResult['annotations'] }; // shallow clone plus updated values\n }\n }\n\n // interpolate hand results\n if (!bufferedResult.hand || (newResult.hand.length !== bufferedResult.hand.length)) {\n bufferedResult.hand = JSON.parse(JSON.stringify(newResult.hand as HandResult[])); // deep clone once\n } else {\n for (let i = 0; i < newResult.hand.length; i++) {\n const box = (newResult.hand[i].box// update box\n .map((b, j) => ((bufferedFactor - 1) * bufferedResult.hand[i].box[j] + b) / bufferedFactor)) as Box;\n const boxRaw = (newResult.hand[i].boxRaw // update boxRaw\n .map((b, j) => ((bufferedFactor - 1) * bufferedResult.hand[i].boxRaw[j] + b) / bufferedFactor)) as Box;\n if (bufferedResult.hand[i].keypoints.length !== newResult.hand[i].keypoints.length) bufferedResult.hand[i].keypoints = newResult.hand[i].keypoints; // reset keypoints as previous frame did not have them\n const keypoints = newResult.hand[i].keypoints && newResult.hand[i].keypoints.length > 0 ? newResult.hand[i].keypoints // update landmarks\n .map((landmark, j) => landmark\n .map((coord, k) => (((bufferedFactor - 1) * (bufferedResult.hand[i].keypoints[j][k] || 1) + (coord || 0)) / bufferedFactor)) as Point)\n : [];\n let annotations = {};\n if (Object.keys(bufferedResult.hand[i].annotations).length !== Object.keys(newResult.hand[i].annotations).length) {\n bufferedResult.hand[i].annotations = newResult.hand[i].annotations; // reset annotations as previous frame did not have them\n annotations = bufferedResult.hand[i].annotations;\n } else if (newResult.hand[i].annotations) {\n for (const key of Object.keys(newResult.hand[i].annotations)) { // update annotations\n annotations[key] = newResult.hand[i].annotations[key] && newResult.hand[i].annotations[key][0]\n ? 
newResult.hand[i].annotations[key]\n .map((val, j) => val\n .map((coord, k) => ((bufferedFactor - 1) * bufferedResult.hand[i].annotations[key][j][k] + coord) / bufferedFactor))\n : null;\n }\n }\n bufferedResult.hand[i] = { ...newResult.hand[i], box, boxRaw, keypoints, annotations: annotations as HandResult['annotations'] }; // shallow clone plus updated values\n }\n }\n\n // interpolate face results\n if (!bufferedResult.face || (newResult.face.length !== bufferedResult.face.length)) {\n bufferedResult.face = JSON.parse(JSON.stringify(newResult.face as FaceResult[])); // deep clone once\n } else {\n for (let i = 0; i < newResult.face.length; i++) {\n const box = (newResult.face[i].box // update box\n .map((b, j) => ((bufferedFactor - 1) * bufferedResult.face[i].box[j] + b) / bufferedFactor)) as Box;\n const boxRaw = (newResult.face[i].boxRaw // update boxRaw\n .map((b, j) => ((bufferedFactor - 1) * bufferedResult.face[i].boxRaw[j] + b) / bufferedFactor)) as Box;\n const rotation: {\n matrix: [number, number, number, number, number, number, number, number, number],\n angle: { roll: number, yaw: number, pitch: number },\n gaze: { bearing: number, strength: number }\n } = { matrix: [0, 0, 0, 0, 0, 0, 0, 0, 0], angle: { roll: 0, yaw: 0, pitch: 0 }, gaze: { bearing: 0, strength: 0 } };\n rotation.matrix = newResult.face[i].rotation?.matrix as [number, number, number, number, number, number, number, number, number];\n rotation.angle = {\n roll: ((bufferedFactor - 1) * (bufferedResult.face[i].rotation?.angle?.roll || 0) + (newResult.face[i].rotation?.angle?.roll || 0)) / bufferedFactor,\n yaw: ((bufferedFactor - 1) * (bufferedResult.face[i].rotation?.angle?.yaw || 0) + (newResult.face[i].rotation?.angle?.yaw || 0)) / bufferedFactor,\n pitch: ((bufferedFactor - 1) * (bufferedResult.face[i].rotation?.angle?.pitch || 0) + (newResult.face[i].rotation?.angle?.pitch || 0)) / bufferedFactor,\n };\n rotation.gaze = {\n // not fully correct due projection on circle, also causes wrap-around draw on jump from negative to positive\n bearing: ((bufferedFactor - 1) * (bufferedResult.face[i].rotation?.gaze?.bearing || 0) + (newResult.face[i].rotation?.gaze?.bearing || 0)) / bufferedFactor,\n strength: ((bufferedFactor - 1) * (bufferedResult.face[i].rotation?.gaze?.strength || 0) + (newResult.face[i].rotation?.gaze?.strength || 0)) / bufferedFactor,\n };\n bufferedResult.face[i] = { ...newResult.face[i], rotation, box, boxRaw }; // shallow clone plus updated values\n }\n }\n\n // interpolate object detection results\n if (!bufferedResult.object || (newResult.object.length !== bufferedResult.object.length)) {\n bufferedResult.object = JSON.parse(JSON.stringify(newResult.object as ObjectResult[])); // deep clone once\n } else {\n for (let i = 0; i < newResult.object.length; i++) {\n const box = (newResult.object[i].box // update box\n .map((b, j) => ((bufferedFactor - 1) * bufferedResult.object[i].box[j] + b) / bufferedFactor)) as Box;\n const boxRaw = (newResult.object[i].boxRaw // update boxRaw\n .map((b, j) => ((bufferedFactor - 1) * bufferedResult.object[i].boxRaw[j] + b) / bufferedFactor)) as Box;\n bufferedResult.object[i] = { ...newResult.object[i], box, boxRaw }; // shallow clone plus updated values\n }\n }\n\n // interpolate person results\n if (newResult.persons) {\n const newPersons = newResult.persons; // trigger getter function\n if (!bufferedResult.persons || (newPersons.length !== bufferedResult.persons.length)) {\n bufferedResult.persons = JSON.parse(JSON.stringify(newPersons as 
PersonResult[]));\n } else {\n for (let i = 0; i < newPersons.length; i++) { // update person box, we don't update the rest as it's updated as reference anyhow\n bufferedResult.persons[i].box = (newPersons[i].box\n .map((box, j) => ((bufferedFactor - 1) * bufferedResult.persons[i].box[j] + box) / bufferedFactor)) as Box;\n }\n }\n }\n\n // just copy latest gestures without interpolation\n if (newResult.gesture) bufferedResult.gesture = newResult.gesture as GestureResult[];\n\n // append interpolation performance data\n const t1 = now();\n interpolateTime = env.perfadd ? interpolateTime + Math.round(t1 - t0) : Math.round(t1 - t0);\n if (newResult.performance) bufferedResult.performance = { ...newResult.performance, interpolate: interpolateTime };\n\n return bufferedResult;\n}\n",
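// ---------------------------------------------------------------------------
// Worked example (not part of the original source) for the smoothing factor
// used by calc() in the interpolation module above:
//   buffer = 8 - ln(elapsed + 1) for elapsed < 1000ms, otherwise 1
//   current = ((buffer - 1) * previous + live) / buffer
const interpolateValue = (previous: number, live: number, elapsedMs: number): number => {
  const buffer = elapsedMs < 1000 ? 8 - Math.log(elapsedMs + 1) : 1; // same curve as calc()
  return ((buffer - 1) * previous + live) / buffer;
};
// at 50ms since the last detection buffer is ~4.07, so a box edge previously
// at 100px with a live value of 120px moves to ~104.9px, roughly a quarter of
// the way toward the live data; near 1s delay the live value is used unchanged
console.log(interpolateValue(100, 120, 50)); // ~104.92
// ---------------------------------------------------------------------------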
"/** Face descriptor type as number array */\nexport type Descriptor = Array<number>\nexport type Options = { order?: number, threshold?: number, multiplier?: number } | undefined;\n\n/** Calculates distance between two descriptors\n * @param {object} options\n * @param {number} options.order algorithm to use\n * - Euclidean distance if `order` is 2 (default), Minkowski distance algorithm of nth order if `order` is higher than 2\n * @param {number} options.multiplier by how much to enhance difference analysis in range of 1..100\n * - default is 20 which normalizes results so that similarity above 0.5 can be considered a match\n * @returns {number}\n */\nexport function distance(descriptor1: Descriptor, descriptor2: Descriptor, options: Options = { order: 2, multiplier: 20 }) {\n // general minkowski distance, euclidean distance is limited case where order is 2\n let sum = 0;\n for (let i = 0; i < descriptor1.length; i++) {\n const diff = (!options.order || options.order === 2) ? (descriptor1[i] - descriptor2[i]) : (Math.abs(descriptor1[i] - descriptor2[i]));\n sum += (!options.order || options.order === 2) ? (diff * diff) : (diff ** options.order);\n }\n return (options.multiplier || 20) * sum;\n}\n\n/** Calculates normalized similarity between two face descriptors based on their `distance`\n * @param {object} options\n * @param {number} options.order algorithm to use\n * - Euclidean distance if `order` is 2 (default), Minkowski distance algorithm of nth order if `order` is higher than 2\n * @param {number} options.multiplier by how much to enhance difference analysis in range of 1..100\n * - default is 20 which normalizes results so that similarity above 0.5 can be considered a match\n * @returns {number} similarity between two face descriptors normalized to 0..1 range where 0 is no similarity and 1 is perfect similarity\n */\nexport function similarity(descriptor1: Descriptor, descriptor2: Descriptor, options: Options = { order: 2, multiplier: 20 }) {\n const dist = distance(descriptor1, descriptor2, options);\n const root = (!options.order || options.order === 2) ? Math.sqrt(dist) : dist ** (1 / options.order);\n const invert = Math.max(0, 100 - root) / 100.0;\n return invert;\n}\n\n/** Matches given descriptor to a closest entry in array of descriptors\n * @param descriptor face descriptor\n * @param descriptors array of face descriptors to compare given descriptor to\n * @param {object} options\n * @param {number} options.order see {@link similarity}\n * @param {number} options.multiplier see {@link similarity}\n * @returns {object}\n * - `index` array index where best match was found or -1 if no matches\n * - {@link distance} calculated `distance` of given descriptor to the best match\n * - {@link similarity} calculated normalized `similarity` of given descriptor to the best match\n*/\nexport function match(descriptor: Descriptor, descriptors: Array<Descriptor>, options: Options = { order: 2, multiplier: 20, threshold: 0 }) {\n if (!Array.isArray(descriptor) || !Array.isArray(descriptors) || descriptor.length < 64 || descriptors.length === 0 || descriptor.length !== descriptors[0].length) { // validate input\n return { index: -1, distance: Number.POSITIVE_INFINITY, similarity: 0 };\n }\n let best = Number.MAX_SAFE_INTEGER;\n let index = -1;\n for (let i = 0; i < descriptors.length; i++) {\n const res = distance(descriptor, descriptors[i], options);\n if (res < best) {\n best = res;\n index = i;\n }\n if (best < (options.threshold || 0)) break;\n }\n best = (!options.order || options.order === 2) ? Math.sqrt(best) : best ** (1 / options.order);\n return { index, distance: best, similarity: Math.max(0, 100 - best) / 100.0 };\n}\n", "/**\n * Analyze detection Results and sort&combine them into per-person view\n */\n\nimport type { FaceResult, BodyResult, HandResult, GestureResult, PersonResult, Box } from '../result';\n\nexport function join(faces: Array<FaceResult>, bodies: Array<BodyResult>, hands: Array<HandResult>, gestures: Array<GestureResult>, shape: Array<number> | undefined): Array<PersonResult> {\n let id = 0;\n const persons: Array<PersonResult> = [];\n for (const face of faces) { // person is defined primarily by face and then we append other objects as found\n const person: PersonResult = { id: id++, face, body: null, hands: { left: null, right: null }, gestures: [], box: [0, 0, 0, 0] };\n for (const body of bodies) {\n if (face.box[0] > body.box[0] // x within body\n && face.box[0] < body.box[0] + body.box[2]\n && face.box[1] + face.box[3] > body.box[1] // y within body\n && face.box[1] + face.box[3] < body.box[1] + body.box[3]) {\n person.body = body;\n }\n }\n if (person.body) { // only try to join hands if body is found\n for (const hand of hands) {\n if (hand.box[0] + hand.box[2] > person.body.box[0] // x within body for left hand\n && hand.box[0] + hand.box[2] < person.body.box[0] + person.body.box[2]\n && hand.box[1] + hand.box[3] > person.body.box[1] // y within body for left hand\n && hand.box[1] + hand.box[3] < person.body.box[1] + person.body.box[3]) {\n if (person.hands) person.hands.left = hand;\n }\n if (hand.box[0] < person.body.box[0] + person.body.box[2] // x within body for right hand\n && hand.box[0] > person.body.box[0]\n && hand.box[1] + hand.box[3] > person.body.box[1] // y within body for right hand\n && hand.box[1] + hand.box[3] < person.body.box[1] + person.body.box[3]) {\n if (person.hands) person.hands.right = hand;\n }\n }\n }\n for (const gesture of gestures) { // append all gestures according to ids\n if (gesture['face'] !== undefined && gesture['face'] === face.id) person.gestures?.push(gesture);\n else if (gesture['iris'] !== undefined && gesture['iris'] === face.id) 
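/* ---------------------------------------------------------------------------
   Illustrative usage sketch (not part of the original source) for the match
   module that ends above; array contents are hypothetical - match() only
   requires descriptors of equal length with at least 64 elements:

     import { match } from './match';            // assumed module path
     const unknown = new Array(128).fill(0.1);   // hypothetical descriptor
     const db = [new Array(128).fill(0.1), new Array(128).fill(0.9)];
     const best = match(unknown, db);
     // -> { index: 0, distance: 0, similarity: 1 } for the identical entry

   Note the early exit in match(): scanning stops once distance drops below
   options.threshold, so supplying a realistic threshold speeds up searches
   over large descriptor databases.
--------------------------------------------------------------------------- */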
person.gestures?.push(gesture);\n else if (gesture['body'] !== undefined && gesture['body'] === person.body?.id) person.gestures?.push(gesture);\n else if (gesture['hand'] !== undefined && gesture['hand'] === person.hands?.left?.id) person.gestures?.push(gesture);\n else if (gesture['hand'] !== undefined && gesture['hand'] === person.hands?.right?.id) person.gestures?.push(gesture);\n }\n\n // create new overarching box from all boxes beloning to person\n const x: number[] = [];\n const y: number[] = [];\n const extractXY = (box: Box | undefined) => { // extract all [x, y] coordinates from boxes [x, y, width, height]\n if (box && box.length === 4) {\n x.push(box[0], box[0] + box[2]);\n y.push(box[1], box[1] + box[3]);\n }\n };\n extractXY(person.face?.box);\n extractXY(person.body?.box);\n extractXY(person.hands?.left?.box);\n extractXY(person.hands?.right?.box);\n const minX = Math.min(...x);\n const minY = Math.min(...y);\n person.box = [minX, minY, Math.max(...x) - minX, Math.max(...y) - minY]; // create new overarching box\n\n // shape is known so we calculate boxRaw as well\n if (shape && shape[1] && shape[2]) person.boxRaw = [person.box[0] / shape[2], person.box[1] / shape[1], person.box[2] / shape[2], person.box[3] / shape[1]];\n\n persons.push(person);\n }\n return persons;\n}\n", "/**\n * Embedded sample images used during warmup in dataURL format\n */\n\n// data:image/jpeg;base64,\nexport const face = `\n/9j/4AAQSkZJRgABAQEAYABgAAD/4QBoRXhpZgAATU0AKgAAAAgABAEaAAUAAAABAAAAPgEbAAUA\nAAABAAAARgEoAAMAAAABAAIAAAExAAIAAAARAAAATgAAAAAAAABgAAAAAQAAAGAAAAABcGFpbnQu\nbmV0IDQuMi4xMwAA/9sAQwAGBAUGBQQGBgUGBwcGCAoQCgoJCQoUDg8MEBcUGBgXFBYWGh0lHxob\nIxwWFiAsICMmJykqKRkfLTAtKDAlKCko/9sAQwEHBwcKCAoTCgoTKBoWGigoKCgoKCgoKCgoKCgo\nKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgo/8AAEQgBAAEAAwEhAAIRAQMRAf/E\nAB8AAAEFAQEBAQEBAAAAAAAAAAABAgMEBQYHCAkKC//EALUQAAIBAwMCBAMFBQQEAAABfQECAwAE\nEQUSITFBBhNRYQcicRQygZGhCCNCscEVUtHwJDNicoIJChYXGBkaJSYnKCkqNDU2Nzg5OkNERUZH\nSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6g4SFhoeIiYqSk5SVlpeYmZqio6Slpqeoqaqys7S1\ntre4ubrCw8TFxsfIycrS09TV1tfY2drh4uPk5ebn6Onq8fLz9PX29/j5+v/EAB8BAAMBAQEBAQEB\nAQEAAAAAAAABAgMEBQYHCAkKC//EALURAAIBAgQEAwQHBQQEAAECdwABAgMRBAUhMQYSQVEHYXET\nIjKBCBRCkaGxwQkjM1LwFWJy0QoWJDThJfEXGBkaJicoKSo1Njc4OTpDREVGR0hJSlNUVVZXWFla\nY2RlZmdoaWpzdHV2d3h5eoKDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXG\nx8jJytLT1NXW19jZ2uLj5OXm5+jp6vLz9PX29/j5+v/aAAwDAQACEQMRAD8A+qaKACigApGOKAML\nXp8xlF5A7V4X8RtYs7PzfNImnx8sa8Kp9z3q2tEgp6angWs62ZZ5CTGoJ6DArGNz5p+UrID6EUrF\nPUlW1EuN0XNW7PQ2L5j3JnoKXN0KijqNP0eYoqXBdgPuuo+ZPeupisWn2Jd4+0r924XgsQOCff3/\nAJ1FzRKxDqGii6m3siiQ8F1XGfXI6YNWLfRbiRQMkcZI9fpTDluT2/h6Qy8gDPbtmtG38JeY480Z\n5zSLUTZg8M28YwYxjAArXtdPt402qgHbpSaLWhma3o0Uqk7Nx9DWLaaVblgPs6qRyds2M/gRSQp9\nzZOni2iWS2hlQ+kjYz9OMGrdjq89vIPPVhj+8M/lQyDq9P1WOYBlMZz1AOD+VdDaTiReOKulK0jO\ntHmi0WDTlr0TyxRVhT8tJjIX+9SUxHXUV553BRQAVBcPhSBTSuxPY86+IGti0s5I7dsORy9fM3i6\n8e8mfDO5P90ZrWWiJicNPpZZtxV/xrW0jQt4DOv6Vk2dEEdTY6BHuB25rpbPSo0QARjP0qTRI17W\nwA/hFaMWmoQMgflQXYsDS142rU9tpqqenfNA7GgtihxkdKuRW6qMY/GkDZY8sY4Ap4hXbyB+VArk\nEtuH4wPyrk/EGkOm+a3jw3suRQLc5i38SX9hJ9nnY+XnBUdPyNdFY6pa3KkkAE9l6f8AfJ/pSJT6\nGhDmI+Zb4ZRycdv6ium0nUhKFydrelTsNnS2829RnrVgV6NKXNG55lWPLIM81Op+WrZkRMfmNNzT\nA7GivPO4KKAEY4XNYWt3vkwPg4OK0giJdjw/xrqhm87Zs8tc7pX5A+leSajf6aHYJ50kn4AZpTep\nrBWRm2Vobm4BXfyehPFdnpmnBFUY5rI2SN63tlToK0YI+KZpFF+3QdavwoKTLtoW0Toaswpk5pCb\nLCxipAhoIuP2dKevHXoaYDylRyxhlwRQI4nxVoCXWZI1GfpXGtbSWjYPGP73+NIGupt6TqMsLruZ\nih4xnP5V09mQ+JLd8gn0xSYJnVaVdkook69K34zuUGunDS3Rx4qOzHVIp4rrOMY3NJQI7GivPO8K\nKAILt9kZrz3
xlebYiu8KCCWb0XvW0NFch6ysfO3jLVjfXLIn+pQkKorl7WxNxIPl71g2dUUdpo+l\npBGvHPet23iC8ihFosrxirkHQUFo0IF4FXI1O726CpKLacCrMJoJLYHAPpTwucHpSRJJ5e4AZI9x\nUqpxzVpCuOC8cUpQUMRnXttuB4rjNdsYyeVwfXpmpGmcvcQyafMCFJjPY10eg34BUg4DcZP8jUO4\nHaRq3lLNF+IHet7R7jz7c56rwa2wz9+xhiVeFy/T1PFegeaNPWigDsc0ZrzzvDNIaAM7VpNqdegr\nxL4l6kywyRhseZ19lrdfAZL4jxYg3Fw20d63tJsdrDI5rm3Z3R0R0Mce1eKnQYAplIkWrMJ45oZS\nNO3PHbNXIyfpSGWowSOasxLUiZdjFSqtNEMkUemKlAGKsRJjAppFAiORMjmsTVrNZEO4cfSoZSOD\n1eJ7WXBUzQZ+7nkfSo7e2Ei+ZaMzxntjBX2NSU1Y6/wxqojiEFzkA8KTXYaUoWRyv3W5rSjpNHPX\n+BmpSg8V6J5gUUAdhRXnneFFAGHrTfu5PpXzj8S70/aZtxzztXFbv4DKHxHI+H4GZiz9zxXXW8G3\nGBXMjvLRXAx0oPGPSmMVeOnWrMTYpFI0bcg1fh54xmgovRcD3qxETSIZcRvzp+/BpEkqsBUqsM9K\nq4Em4Gkxk0yRGXrVW6i8yFhkg+tJjRxGsWrxllkUMh9eK5uMz6bcebbnfG33kPcVkay2OntPKuo0\nnhXI67c8qa7Lw3c+adjcEDGK1paSRhVV4s6A0or0jyRRQ1AHX0V553hRQBz+vNtt5z3xXzX8Qbdm\nuic5YnOMdK3l8JnTXvlbwpYl+WySOgrp5YfLOOB9O1c62O7qQkc+9RsKChFPWp4DluOlSykaNruH\nArUgHShFNF2NT1qxGO3NBmyxGcE1N2560CFzjrUysO9JAPDDjFOVuKoQuSRTWouBkazbCa3cd8cV\nwF7IISQccHBzUSWpV9C3o1x5b5GAjdQD1rs9DjC3kckbEhqKfxIzn8LOupRXqnkPccBSkUAzraK8\n87wooA5rxMSI3HqK8B8bQl9Q8sffY5b/AAraXwkUviNrw9pH2W1ViMMRTdRjw4HpWNtDti9TPc4P\nFQs2M5qdyyMHLcfjV63HTAoBGtap0wK0YxigpsuRDtVhVYd6GQydVwwIqdRnqKCR23I5pCMUW6gD\nYNKuetAEise9KTxQBWuFyhrznxNZkXjFeN3I+tTIZg2OqmzmxNF0PO3vXp/g2+hukVl4zyPanTXv\nJmVR+60dpThXpnlPceopWFAbnV0V553hSGgRynjC5FujOey14Ssp1HxNmTnc+a3kvcIpv37HoEYQ\nQmMdVHSsnVbYJF5jVk0dsNzlruVIsl2wKxbjWrVHILjg1CRbZJb+ILHPzyhfStODWLQgFJFYd+el\nUJM27HUIXxhga1Y5lLVLKLkMnoauxnPPrSEx7ShF+Y/n2qrc6xBbhizDAqkK1zJuvG9nbg8ZA681\nly/Ei052RO3uKAsZlx8QGd8xxvt9Aa1NH8dK7AXMcip64zigdkdrZX8F7EJLdwwNXMkrz1qRMRly\nCK4TxmpidWI49felPYSOMmi80NIoOV6qRzXYeA5SskYPfirpfEjGr8LPWVHyD6U4CvQPL3ZItOYc\nUDOoNFeed4Uhpks4H4iE/Z5MeleMeGULeLgjds10S+BGdL+Jc9OSBU2Huc5Nc74yvUtrcDBrJnZF\n63PJdXvLy/lKWw46bvQVz82jXhkLO5Y+9ZlsYthcRnbIjY9R3q3awTRkEM3WmJI6C0ea3dGRsr1x\nXY6TqW9FLHnjrUs0izpLK5DDjofSta3ckH09KRUkZuuTvFGdvPauE1Y3U6Mqbssf/rUxHPTaJPK2\nZmJPbBqzY6DCZh5xJC9s9aBJHU6dpemJjfEmfetJtI0+VPkUr/unFOxdiextHs33W07YHQHk11mk\nXb3KbZ1xIvcd6LEyWho4Nct41sTPYb16ipexCPPZN+wYGCvH1rrPAEJmvkPoc1VL4kZVvgZ6yFwK\ncBXoHkkqinFaVyzo80GuE7WJRQSziPiGdthK5HQV4x4J/wBI8WPIewNdEvgRNL42emO/yj1UHNef\neNpRczbC+I17DvWT2OqJxc0sMK4TCisy41q0hfEkqj8aixdwTXNOlwvmqD9anS9tXH7uVG+hosO4\n/wC0oOhrR0+6G4YNIEzsNEuCxAPNdjZruA4xxUmjINSjURksOlcbqFykbnjFA1sYGoassaknCqO5\nrl7rxhGm7yBnBxuJq0rkSlYpw+NLlsfd5P8AerVsvHEqSBHwPVgcgVpyMyVXU3rXxcHYETAk+hru\n/DWti6ZSTyOKzZqndHaxvvUGq2rQ+dYyqR24qWI8dvbr7LqDxyDAzXpvw6FvIxePGSM06Xxoyr/A\nzviKFHNegeX1J41zUhXioGbuaSuM6wpCaBHG/EcA6HN/exxXjXw2jL67cv8A3Qa6H8CFR+NnoWpO\nI4XI44rxLxrqjQzSEsQM1gdSPM9U1uR1YbmWIdXHf2rmpIb67YS28UrRlsLI3c/jW0VZGUpO5pW1\njfLNOjahawzwReYI5cjzMkDavHJ5/SrVv9uhtPtVxCPLBwzxnlT9KGghLU3tKvvPjHzbl7EGuisJ\nGRxWLOg7nRXJEbDjmvSNK+aFSfSoZr0KutRkphc4NcRrdkVjL9aVio7Hk3iqS8ubhrWzUlsZY9kG\ncZNc5D4aee5MclzJIFTzHAO0MfatqSOWu7bFS1srDUZEis0vIZoUxPvfcC+4/dx2xjr712XiTwXb\nWmlQ6hol3cRhoFd4rlg3zY5wR0GelavQwjq7GD4etdVvSnk2wAB+9v8A8mvcfA2kXiRo0/UdcDis\nZnTTulqeoWqbUAJqWUb42X1FZlnjfjSwlGrr5S/eNdD4RkvLAAQ4yRyaUZcruVKl7TQ9I0G+mnzH\nckFwM8VuIK7ac3KF2eXiKapz5UWYxipNtMyNejNch0jSar3cjR27uoyQCRVRWom9DxTx54gu5fMi\nlbKdMVjfCZPNlv5v9rFbVHpYqjGzbOn8SzFI9o715L4u0r7arYzk+lYdTqSujy7U/C0u4vHk+WwO\nxuh9q3J9dgvbdVukMV1EwbDDgn04rZMwlHoZ+orZ6hfQ3RWVnQYCgZAq+8U0ln5NtBsV2yxYcfgK\nJtW0CnB31LlroVwJ1nQLGDjeP7w+lb0dsFxjrWB0tHS6NuWPJ6A16ToUm63T3Gallr4S7cxiTjrX\nPaxaF7dlVeSMUhxZ5jd+H7qCa4eF3DSE5x3zXN3Wk6jbyeaiFWUY6ZyPStYS5SalPmVipFbX0E4c\nW0alvmPHJrag0rVvEE6LdljGpG2NRtQD+tW5XMI0uU9M8NeFo9PiQhecDIIrtrOMIoG3H4VlJm9t\nC6CB06VPGM1IHLeItGS6uw+ORT7e3jsbQvj7gzUNam0JaWE+HN7NqOqX
80n3FO1RXo8YzXdS+BHk\n4z+KyzGPapcU2YIv7qQtiuaxvcaWqG4O6FwfSrS1JbPnrxoxkv7qIfejcitj4V2f2exumI+8+aKn\nxHTT+G5d8Txlm4rjLxMsQwzWT3OiK0Mm6sEkVsAcjFc1d+FEmlGwEDPQVopaEuOpr6f4ZWNAu3tW\nvHpAj5ZQcUFIWaDjGMVUMQ3cVDBmvbhY7QAV2nh+T/R1yeKhlrY31+b61FcQK6nIoJMi401WblRi\nqr6PCw5UYq9y+YgOgWzNkRrx3xWjp+nx2v3FQcelAbmko9anQ4GBUNisPHWr1qMrQhS2K11HvmYV\nhamcxSRZ5xRIqluS/DKAQQXZxyXrvo2FdlL4EeZjH+/ZbjNSZpswLNBrE1Gt7VE4ODVIlnh/j61F\nj4lmeTGyUbq6LwdEqWbeX0YbhSqfEddP4Bddj4JIrhL5d8h7VjI6oLQqKNzelWre3yc4/ClFjaL6\nwqBxxUUxwCKu5BmXRA6c+9ZjP83FSBoQuPs4BrsNBlUW659KmRrDY6G1lyQtW3Hy0lqQ1qVJnAbm\noy3b9KYJCqRj3o4zRctIlhjLHmpSuOBRbQOpLGpPFaES7UqkZzKN1KsEc87/AHUUmvPLTVGv72aQ\nk7WJwKmRrQ3ud74Ltilgz4++2a6iNDXdS0gjyMU71my7GpqTbxSbMki3SViajTTHqkSeR/GeyZmg\nnQHkEE1S+F+oPPavBL96I4/Cia1udVF+4dVrkW+Fq8+v4tjMDWUkdVJ6WM0cNV+F+MVmjUcZgqnP\n1qpNNnkcVRLiZtxIS1UzzIF7mghlxUZpVQdq6nTVdAoAOKzkbQWhvwM6gMM1twOJYx3NOJE11Kt1\nH1/pVVlwBkk+9NocXoOQ45FPj+fkUJFF2NSB700v/hTEty5ZpkjvVyUgcCq6GM9zC14/8Se6GcZQ\n1574Xs5WkI2HBPHFQ1dm1KSSZ7Rotn9l0+KPHIHNacae1dy0Vjxaj5ptlhVp+2s2CJ9ppCKzuWNx\nzSFc1SYrHNeNdIGpaYw25ZeRXmvheyk0jVpEdcLJ0q3ZxNKTa0O3vQHg/DNcHrsJDmsmjspnNzNt\nfFIJ24GazOhC+azDmgZIOOKBsp3J2qSaZodubq58yQ4QAnmhGT3NO18pb7BORmu205LfYpyKVkWp\nOxr5gKYWoIZWgfGfloFq1qTPLubnGO1RPtxg4P0oBAkY/hBz6VNDDkZ6AU0W2WSdqkdKr9ZOaGSj\nVtcLHmnOcgmmYvcz7mBLy3MbdD1q9ouiRK6bUAVeelOC1InPlidSsWMDFOCEdq3uefykqrinYqGy\nrFvApMVka2DAowKAsMkRXQqwyDXn/iWyitNQ3qPl6itIvRoF8RXinW4tQ6HI6GuW8SIVBPalc6qe\n5x9x97r3qruwTjrWZ0ksZ9TUmcDNAmZ9/wAoao63rR0+w22MLPtAzt6mghmfofiB76LdJBJBIp5D\nd/oa7bSdWLIPnpDi9TM8TeKdas51XTbIyxd3J/pXS+E/EFxqNoFu7do5OmD60maHWrnZyDRkn/69\nMlEyOR0xntVoNx+FUgYjPxg4FLCuWDZyKQr2RoRnP0qO+nEFpJITgAUzLqZnhu6+0rknOTXpOmwJ\nFbrt5yMmnHYyr6Oxb2ijaKLnPYMClwKQWK3n0hn+lachHOJ9pNNN0apQFzsY10a4v4hXQh0xpieQ\nMA1XLZNjhK80cT8OdV+3Wl3A7ZZJCw+hrR1qLcjZ/CsbnfHRnFXseHJArOYYbrUs1uPhYbuatqFP\nByfSkMq3UIINYkto+87Tx6GkSxfsDbflGD7CtTw/pk4nzITtPIFMFudsukh4Rxz71paTpKwP5jcn\n0qTRy0NORMDgVCqewoJTJgAoxjntTiTu7fWmFxAcnn1q3EPl+X8KZMi4gKqB1Peob/Tv7Us5bfeU\nyOoq4R5nYxqT5I8xieH9J1DTbvyJELRg8ODwa9Ms5mSFV9BWiptbnNVrKdmif7Q1KLg96XIZc5Is\npNL5pqeUrmMtZs0jzV08phchaY00zH1p2ZNxjS1g+LdJOt6U9ssmxjyGp2urDjLlaZzng/wUPDqz\nTSTmWeTrjpVjVk3Rvjr2rnqQ5dDvo1XUd2cTqSNk9OKxXGCeKxZ1DAxHTr2q5C/y8GokUhsz54qu\nuCxzSQjQ0+FZblR2ro4bZYiMVQ0dBb7Qi5x0qzuG5QOh71LYErDufpSeWrHnimIXbjkUjLkH1Hem\ngGxryc+tXI19KYmWegq9YLiLJ7mtqS945cS7QsWehqxA9dEjz4krPSxyZqbFFhGxUm6smjRM55Lk\nHvSvNxXTY57kLT+9MNwKdhXGm5FIbkU7Bca1wMEVhaiuQcVhXWiZ14R6tHGanGBI2OtYkqEHjgVy\ns9ErEeo6UBsHipKEZs5qpPdRxcbhx70NCSuybTNWihc5brW9Fq6vjMnFSdEIdDRi8RRKygZbHFbu\nm6nb3RA3gMegNJhOm0jbXGOoxTuCc1Rz3FyoGKawz9KaAVcZqeMgCmIkB4FaUTbYwB6V00Fuzixb\n0SFMuDU8Mlbs4UPeXHeiOXkUrDuXYnyKk3cVk0ap6HMxxketSMhrcwRC0dMMZFMQ3yzSeVQAeUaz\n9Vj8uPd271nVV4m+GdpnHX67pCeKyLtBtNcR6xlk9RVeWTb3qRnO6trgttyIfm71z7ai8j7/AJmN\nDNqUVa5Yi1AnjynHuBV+11YJhWWXcP8AZNSzqgmaEerSsf3NtIQP4mGKtRavdRgMIpVI9KjU0a7n\nR6T43uYQI7qN2Tpkqciu503VVuQGAYZHQjFVc4alPlZrpKGAznpTwxOc9+lWjIlUACnM4XApiLNk\nnmvnsK0NvpXZRVonmYqV52GsmanhXitTmFkSiJTSAvwrxUxXIrJ7miOfjf1pzNWxkRlqYWpgJupu\n6gQbuahvIxPA6eo4pNXVioS5WmefakGhndH4INZs5DJXA10PaTurmLO21uKpSZqGMoXGnRzBiyjd\n9Kx5rcQS428fSkjanLoaOliHGZFB56VswW+mtPufcBsGOAfmxz+tFkd8HpoaUx09FAtFY8DO71qb\nSms/Nb7RbecG6AEjFLS5c78t+p0djpVs9wsyQiJAdyr1rW+zqjErzSe559Sbk9S3C+MA1bjbgE1S\nMSXzMVG0vNUI2tPKrAuCMnrVzNd0PhR49W/O2xrHmp4TxVMzQshpIzzQBehqesnuaI5VGzT2bitz\nFEbNTC1ADS1JupgG6l3UAc14s04yR/aYRll+8BXCtLncDXFWjys9TCz5oW7GddH5qqNzWDOgQnC8\nVSuo1kHzAGkPYopEY2+RWxV23Vzj5G/Kg3jWaNazhZuqNXS6TaKhB2c0jR1nJWOlhOxRxU4YkCgx\nY0OQatQyDbyaaFYe8uF4NY3iC9ltbVGj43NTIL3h7WzMihjzXVQXYYDdW9Cf2WcOJpfaRZ3g9KsQ\nmupnCLIabGeaAL0LcVY3cVm
zRHIxtUhetzEjZqjLUAIWpN1ArhupwagAfDKQ3Q1594v0c2bm6tx+\n5Y8j+6ayrR5onThp8s7dzkZjuqAAmuBnqC7c0iwgtzSA0rWzjfGRW3ZadDu4AoNYo2rfS4v7orSh\n05UA2r0pDbsTm29KRottBNyJ0wpJ9KhD7f6U0ikNWffIFBz60zVUW52ow4UcUN6EPcx44WsbgOmd\nua7TT5Bd24KHnFKnLlZFSN4koluLdueRWvp14swweG9DXoxldHlTjYtzGoo25qzEvwtUxas2jRPQ\n5CNqkLVsYoYzUzdQA3dSFqBBmnqaBhuqhriCXTpVIzxUz+Fl03aSPI9QTypW2/dz0qKNw3SvOPZR\nMqin8VLKRcs3O4Cuk0w/MDjt1NBtHY6O2IIHY1pxgFaETIRwMkjtVSUEk4570MlFW5bap6dKzWm8\n1tqH8aY+hp2FvGoGayNevVt7/ap4xzUvYjqTLtvLPcvJxSaVcyWsxTnFZlnT2t15xHmCtOBYwQy4\nB9q7cPO+jPPxFO2qLEj5HWo42+aus4HpoX4W4FTF+KlotbHII9SFuK0MUNZqiLUDE3UbqBBupwag\nBc1DefPbyD/ZND2KjujyPWlKzuPesRZjHJXms9lMuw3StjnmphKDSLTJ7OfE3JrpbO4GQc9qlnRA\n3LO82k5NbFvdADkjBoCSHyXIIIzgVQvdRigT7wzjgUzO1jHknlvG7qnp61etYFQDIpCZoqVijzXn\n3iC8EmsOuaCGb/heR/s0ijkVv6fbxy3QMg5xmsnuX0Ldzut3+UYTPWk+2GJSe+M1pFtamcldalmx\n1eO4XaThhWnC+TXqR2PHqL3maUJ4qRjxSEjj42qXdxVmaGs1MJoATfSbqBAG5p6mgAzTJTmNvpQU\ntzzHXY83D/U1zF5FhjgV5r3Pa6FMsV5HWnLe7RhqBRdmTwagN2d2K2rPU1C5LAnPrUs6Iysbdrq6\nf3gK0BrUKj/WClY05iM6xLOcQAj3NT29uznfKSzHuadzNu7NSBFjHNSm5VO9IRnajqoWMhTzXFtA\nbvUfMduSeg702Qz0rS7FbTToQFwzjJqaGTFyfK5PQViyzUuFmuIdgGABya5u/vTaN5cnUHFUmLoZ\nzyskwlgJweSK6zQdUEwVJeGr0aUrxPLxEfe0OrhPAqVjxWhznGRtUwatDK4jNxURbmkAm6jNABup\n6tQAFqhupNtu59qUnZFwV5JHnWsHdIx96w5lz15rzT2uhRmt85xWbcxMnUGmZlB0bdxmrNvFIcfM\n350mWjbs7YkDJY/jW5ZWW4jikWkdNp9mqYJFaJdEHHakUULu/VB1rLn1Ld/FgetMGYd/qWSQmSa0\n/AemS32pfa7piLeLkg9z6UmQtz0W7uQ2cZx0A9BVzR7cAea6j2rPqX0L99KRat5A6Dk1wOoKZ52a\nYfMORTYRLujiGWEq6/NWza2yKQVHNdOHerRy4laJo6TTnbbtb8KuM3Fdh5z3OJjbmpt3FaMxAtUZ\nagBN1GaQBzTwaAAms3VbjERUGsa07RsdeFpuUuY4jUjljWTKK4j02RE4IpJYFk6imQkVl0xWarsO\nmAEcUi0bNnZBR0rWtoguMCkUi21wI161mXuocEKaYXMS4u+pY/hVCSWSY4HT0pEmlouiSahdpEBl\nmOceleiwWcNjClvHgJH97Hc1EmVFFi3Czy7mwIl/WtJbjP7uLgd/apQ2VNVvtsBhiPzdK5S4nAuR\nnqOCaTGi9pcytPlU+XpmumtWII44rah8ZjiNIXRuWeNvvViQ/LXpJWPJbu7nCRvVkNxVsxBmqJmo\nEPiXca0YLMuOlJsuKuPlsSi5IrNuG8s4HWs5VEkbwoOTKsk+FJY4rC1K53k1xTk5O7PSpwVNWRzt\n4cms+WpKICtSLTETQj5q0YeBSGiys23pUguGxQMq3E59ayrm4x3yaAKiRtO2WPHcmhruKFxFajzZ\nScA44qRHoXhuMaLpxaUg6hcDLMf4F9KlhuDeXGASIl+8azZslYma68y48m1+7nFW5rtbRNhb5z1p\niMKbUg0zuW4A4rPgb7VdKXOMmpA7HRbMS7nUYiUda0lkQOBngVrS+JGdbWLRt2bAx5BqeQ/LXpnj\nPQ4GJ+ashuK0MhWaoWcA0AaOmASMK7jRNPWYBmHyiuepO2x10qfcv6vYxCzYqoGK4HVYVTJrmb5l\nc6oaM5TUJ8EgGsG4kLNUHT0M64OaqMMikSRsuKbnFMRLG3zVehOaGNE445NNlnVFpDMu6uie9Vo1\n8z5mOAOST2pDK91cNN+5tsrH3PrW54a06KxT7fdrlh/q1Pc+tJ6IUdZGvHPLezMcnBOWbsPap5r3\nylFtbdT1xUWNWzU0/Zbwlgfmx8zGsHWtRHmMqE59aAMyNifvHPc1f0gtPdqkY5JosJHeNci2tktY\neuPnNY+oXWZEVJNrZ9aun8SIq/CzodHuriIokhDIR1ronbKZr0o6o8ipoz//2Q==`;\n\n// data:image/jpeg;base64,\nexport const body = 
`\n/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAsICAoIBwsKCQoNDAsNERwSEQ8PESIZGhQcKSQrKigk\nJyctMkA3LTA9MCcnOEw5PUNFSElIKzZPVU5GVEBHSEX/2wBDAQwNDREPESESEiFFLicuRUVFRUVF\nRUVFRUVFRUVFRUVFRUVFRUVFRUVFRUVFRUVFRUVFRUVFRUVFRUVFRUVFRUX/wAARCASwBLADASIA\nAhEBAxEB/8QAGwABAAIDAQEAAAAAAAAAAAAAAAEDAgQFBgf/xABDEAEAAgECBAMECQIDBgUFAQAA\nAQIDBBEFEiExE0FRBiJhcRQjMkJSgZGhsWLBJDNyFSVTY3OSNEPR4fAHFjWCokT/xAAYAQEAAwEA\nAAAAAAAAAAAAAAAAAQIDBP/EACARAQEBAQADAQEBAQEBAAAAAAABAhEDITFBEjJRIhP/2gAMAwEA\nAhEDEQA/APqYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAKNTq8OkxzfNkisQC8eb1XtRNbzXT4q7eU2nu0MntRq/D8StMccvW29ZmdvgjsTyvZjxOLj\n+s8WLxn8TFPXs6Oj9oct7c14rkxz22nrB2I49KOdTjelmszfmpMeUxv/AA28OqwZ4icWWtt/SUi4\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAmdo3nsPNe0Pt\nFh09Z0+DNWL7+9O/7A3eJcZppsV5raI27esvH6jX5ddM25p79Ilo59VbUZOe2Tm/PeGvfPfT2iKR\nPLv1+DO678XmW/a97U6TtOyzTbTF538/T9WjTNecm9a7126tqk3rSYxY5ta1plRZqZNXGjyZcPXl\nmZmsx+qjBrsuO16xM7eXRt04JrdTltk5OWJnfaWf0a2lty5MdZnfzSn+WOHiOutFpjHa9e8bQ2fp\n+alYy462pk7zXbuxjPesbRS0f6ZZV1ET1tErzXFLHo+A+1ddZf6NrI8PJHa1vN6iJi0bxMTHwfOa\nzhzd61v1846utwniM6DUdb3nBaNrVmd9vjC/ZVePYirBqMWppz4rxaPgtEAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAItaK1m09ojcHnvarjM8P0vh49+a/eY8ng9D\nh1fGM1rxjtGPfvbzdbjuTJxHX48cTPNltM/KsS9Dw7S49Jp6UpHaGe2vjz1y9J7LYK13vHWe7bj2\nex1tvM80ekuxW3RnW3Vm6P5jRx8H0+OYmMcb+bapo8GKPdpC6bQwtdHU8JpWkdJ/JweL6e23iU67\nd4dubSqyVi9Zi0bwIs68XGp36TtEq7ZJmZmevzdbifCKWtbJinkt6eTgZPFw32t+sRurbWVzxs1y\nRv6T8V1NZNPtfq0seTm+Kevr+SZuxXjvaPiV8N4viycto9HseG6+uu08W6Rkj7UPmFck1tE1nlmP\nLd3eA8V8HVVi1pjq6Ma/pnqce/ERMTETHaUrKgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAADW19+TQ5p/p2bLS4v04Zmt5VjeQeJ4bjnLqsupv+Ka1+ERLv4reTmcNxcuC\nvy3l0qdI2hlr66sT02ot0ZV7qqrInruzrVZLGSZ37JjqgYTG0K5lbaFVhDT1Ub456RPweY4hixWi\neSdpjvD1eWejz3FNHWYtkpvFo9EIseb3tS3SerOms22rfpPqZKzvvHSYUz70TExG6Gdbs2rljeJ/\nMx5L0vEzPaelnOi98c9J2bFNTFpit47+a+PVUvx9T9nOIfT+GV5p3yY/ds67wvsXqpxau+G09Lx+\nr3TqrEAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADV4ljnLw3U0jvO\nO0fs2lWqyUw6XLkyfYrWZkHldBEV09eveG3Fq1mI3jd4vPrOIaid8G9MP3Y38k6fNrt/rMk9Ou8s\ntfXXn49rGWInuy8SO/k5Gl1E3rG/fzbOe94wTy99mbRvTrMOOvNfJWsesywniukrG/jU6fF43WYN\nTmtEeJtEQ06aSmK2+bNtEd+qfSO17unF9Hmvy1y13XWyVmN4tExLxVK8PmNq5NrT58zawam+m/yc\n0Xj8NpRYSvQZ7xEOdqI3rPozxayNRXe0ct/ON03jmrKB5nV4q1yTO20Obmv4c+cx8HoeI6WZpNoj\nq83niYmYscU0r8aJ6T1n49zeJ+Meqm1drb9J+Kd5p136StGVem9l9TbHxLDFp7W7+sS+q1nesT6w\n+PcAzVjiGHftzQ+v4f8AJpv6On8jH9ZgIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAABp8VrW/C9TW0ztOO3b5Nxp8VmI4bn37TWYB8f1HFtTfUfR9FWJmsdZ9I7MtJxDX5s\nd8ta1y0xzteaR2277rcuhycP12SceLxMeWNpjttHwlu8I0mfQ1y+D7k5YmJmY36T36Ka43z/AF1t\ncI1ds+qxVj7/AEej19PCw9HJ4NoK4OIU5Y35YmZdzVTGebVZabx5jJS+Tmns81rNLm1Wrzc9rVw4\nYibbem72mXTTS0w0M3BvEta1bWrM95ie5EanY87wXgNOL6XPfxraXLhra/W28bR/dzYzarBqJxRe\nbzE7Rt5vWU9n8mPHOGmS0Ypnea1naJb+k9ncNLR7u2y/WcxXO4TOoyUrN6zD0FaW5Y3hu49FiwUi\nKxCvLMR0hlW0jn6ukWw3iXjOJzbDlneOj3GaN6zDzfFOH+LE7SRGo83XNSZ2lbG2/WfdlvaT2cy6\nrNFInlrv1mfJ37cK4PwTTxOoidRm2+/2/KFuyMp47XB4LivXiunrH2b2iH2qn2K/J8x4fGDNxTSZ\n9Nh8OviRvTyfT6xtWI+DeXs9MNZubypASqAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAOZx6/LoOWPvWiHTcf2hiZ0e8fc2mf1E5+vP/AEeuSd7RC2uKtI6QjHfeINTfwtPf\nJvty9WPfbt/lucP03gxfJf7d/wBoReYpm97zaNeLb4Ims9Nt94auDjem1Wo5PFi1onylS+1o7l8V\nbxvtupjDMdNkYtXS1+Stt+m63xImEJ4xjHER2ZxMUjeUTO3VRmydBbjLJqPi08mbeVOXJPq1sl5Q\nVbkz9+rRy35rxHqzmZlVEe/Ez5LRlW5iyfR6zffaIjq1OSNZps2a21rZInafSPJhxGMl9LStLRWM\nlorM/A4dkrWbYfLZC2W/7K6eubX6b4RzT+W76K8b7G6X62cu3Sten59nsm3j+OXz3/0ANGIAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA0OIYfpOHPijvNNo+fdvtXJO18k/\n/OwPFYbz2ls3jx8VqW6xMdWPEdP9D4lkx/dt79flLLHbkxTPwY6nt2512ORTRzE2x4/dpE7cvkme\nE4IrW3hRMxO8THRtU1FKWtvtvK2upx22rzRCtXkqzh2jtF7ZbT122b01ndnpuWuP3Z3+Ky20qDVv\nfauzVy3mejZzNK8dVjqi87KLRLYtXruqvXzkQp7Qoid88R6rcl+WGlW0/Sa22mfhCZOq2x082ix6\njkm822pO8VrPdr4dNObVeDo8XW3uzMbzK+mvxT7szE27cvnu9j7PcNjSaXx8mOIzZevbrEeic5tN\n+SZnpt8J4fHD9HXHO3PPW0x/DeBtJxx29vaAJQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAKNRim9Z5e89Nl4DzXtVh5babURHrSf7f3ec1+qnDorWrvvt5Pccb0n0zhmWk\nRvevv1+cPE2rGTFNZU26PFfxwa5dVkjelI2772nZnX6bbrEUq3o0d678u8wmuDL2ittvVjXdneeK\ncGv4jpJ6U56+kS7+j118+GLXpakzHaWlp9NNY3tv+bbiYiNoQy1y30uyZJlrWmZnuym6q1iIJnop\nyW2Te8bdWnnypQqzZOadokiIpSZntWN5lrxki19vNRxrUeBwnNNd+fJEY6/OejXLn3Xe/wDp9wyn\nE8uo4lqqxblv7lJ26T6vpD5X7G8QycKzeBMbzMRM1/FH/wA/h9QwZ6ajDXLitvWzRgsAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAeL45w+dDrZvWv1OWd4+E+j2jX\n12jx67TWw5Y6T2nzifU+rZ1y9eHwzDYxxEy18+DJodXfT5o96vafWPVbjyxDn1OOzHudbM0rt2UW\niI69mVtRXZq5tREb9VUoy2iIlRbJ0UX1VZ6btTLrI7V6yk62M2oisT1c7JmtkttVMUyZp6x0beDS\nRWOvdKijDimvWd3G9pNRMfRcNfvZOb9Hpb0itJeP47k/3hgjaZnbaP1XxWW3T0movbNS0W645nbf\n0nrMPpXs3xamoxdJiLbe/X1n8Uf3fKsOTw4jbaXo+EarJhtGTHMxeJ6xH7Sti9Zaj6x3HM4NxXFx\nDS1mtoi8dJrv2l011QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAGjxLhODieOIye7kr9m8d4eM4to9RwjPXFa0ZIvG9bR0fQXmPbDFvTTZPOJmEWS/V8bs9R43NxLL\nG8eFbePg1bajU5/s0l1ceKLx1hbjwRE9mOpx0y2uRTSZsm3PMw2aaKtIjo6kYo9EXpET0hVLXxYK\nxC6MZvyx1lFs0RHfaPiCnU12pLyHGNDbUajBekWma2npWN3p8+opa20e9LSyZLxExTlpM+vdOdcZ\na9tPS8MyUvFrzWlI6727u1pYxYrbVmb7x+TQx6au3Nqcl7/0rcmW9axGnwZJj1novmxnZXV0fFp4\nZxLBPgTGK8xzXr5fOH0bFlpmxVyY7Rato3iYfNuG2x56Wrqa8s2jz+7Lu8O12bS6jkwzN6THNNI6\ntvrN68Y4rxlx1vHa0bskAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAA4XtTTm0OKfTJ/aXdcL2pyRGjwU362yb7fkJz9eTxxyZJjyltRXzUZK7TFtl9Lbwy06YzrHwa+\nfJFd/wCVt8m0bQ0eS2qzcm+1K/an+zNZFL5M1pjFXeI72ky48eGnPkvNp27+TPU6nHpMfLXaIjpE\nerk5dRMxOfN1mPeisfshW1ne1a1577Y6x5R3U0zze31FOWI6ze0byU098kRlzbxM9qrMlPDpyRMR\nMd5Vt/Ihp5898mWZm1pjftE91uCt7fCI7dWeHDEW3t723l6rslqxWZnasR+SYhFbzhnfxJ2jyeq9\nlcGXWZcmW0zWKxHLaI7794eJx5fpfEKabT8t8l5isddo3l9S4VjrwrRUwzSJt3tav3pdOL6Y6dXD\nj8HFWm+/KsU4NRXPvtWazHquWVAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAa+fXYNP9u8b+kdZBsDkZOO135cWOZn4y5Wu4xqctbe9y19Kp4njt6vi+PDm8DFMWybbzPlV\n5PiGtz67UxbNbeKTtWIjaIXYpnwuaftT5tXJT3vmi1pMsrU5qIrG1V1a+5DCa7b9GFbRr5J6Wnbt\nCu+Wmk0m8956z8ZWZNorbfzcbX5rZslazPux3hUt41NTntktObJ13+zX1bek01r4/HzVm0bxPXy/\n+bNfDgjVa2uOY92kdfg6ufJOKvLXtttVVSqbcta2vM7zXtHpLQy5ZtMd+vWd+7Zy3mdJHXra3f0c\nvUarw7zFY5rT2hH1Lavnrgx81p3U49Pk4nE5L35MO/StfNRXR5tXnrS8W67WvfyiPSPi7uLHFK1p\njrtSsbR5L
c4RzsXBaYreP4l45esRD2HD9fnw6evvWvO3Tfr0aGk0U55ra0TFInv6uzgrXFXlx0i0\n77RPlC83Yj+JW7oddqr6vHzTTw9/f6dod+L1t9m0T8pcbFSmPHER3892W0zPuz+jSbVvidkcqmfP\nSel7bekrI4n4dZnPWIrHeYnZee2Wpy8dEaml4npNZblw5qzb8M9JbYgAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAABEzFYmZnaI7yCXL1XGa0jJXT0571nbee27DiXEprp8nhbxG20W8\n5cbD0ikfnKO+urTPvjoZdXqctdsmTaPSvRpWmsdZ6yztfaGplvv3lWW1tyRlz1x0vkn7Vo5atTNe\nY0+1o79V2KsZsvX7Ne5mwxnyTNvsx2iGneM/rCdRSuOsTasTt5kRFtpjqmOH4t4nk7estiMNa97R\nHwhna0iuKTEdmGWa4672nZtRele1N59Zlq6vLOSsYorEc07qcW65euzRvtXvPZy52naZ7ujr6fXV\nrWdukREK8+njHgmZmPc67bq6ivVWhxxgxZLztNrT1mZ/SP4VZs0zaOvfp84WUtNsXLvtv3699+rU\nz7+Jtt5qURqMnPpctaR1rMSw4ZoK57eNk6xHaJRh97Ltt7lo5Z+L1HAPZvVauZ2nFTSzMTzeJEz8\nto6xPfvsZntPZ9rXxabmxzefdrv0j1dXh/BcmstW1qxTHHasR3+b0GPhGl+kWmd64dNEVjf73T7X\ny8vy+Ddx6O3iRakxTH5RXrMw1/lX+3Itw2MFIraN48qRHdZi0cUjmmPen9noox1iO0fNzdXEYrTt\nstcmd9aX0bJ+HePmiKTitO8TMLZ1cVjrMfqpz6ys4pjfrPRWZ9rXXptUit6zO+23VyaRHEc05L1/\nw9J9ys/en1ljqdVbwYw452tlnl3jyjzbmmiMeKtYjpEbLeTXPUU8ee/+qjJpsV5rbkrFqzE1tEbT\nDpYNbW21Mnu29fKWna0KbqTdjXXjld0cvQ63ltGHNPSfs2n+HUbS9c2s2UASqAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAOVxPWe99HpP8ArmP4b+r1EabT3yT3iOkesvMVtN7za07zad5l\nXV5GmM9vVfEstvDx0jtaVVMlq+UJ18b5cMRvPeSuK87bUt+i2Z3PtG7zXpjkzXt6R+TXyTMzvM7t\nydHqZ+zhv1+Cv/ZuqvPTHMfOYaTMil1a1K2vHSLTELq2v+KWzThGo84rH5rq8JzedqR+ZeI7WnOS\n34pYTafWXR/2Pln/AMyrKOCWnvmiPyR6O1y9585lhWJvl557Q6eo4T4dYiMvW3b3UanhldHpJtGX\ne09unmjsT7eb1l4trI2t0hsZfrdNO0bzy+nzU20/+NmkzO9esz+TZxWis9dttvPv+Tn21jjaW8zn\n26bTG3mp1M/Wzv3t0jyWXiKZJmsTERaZhXXDbNl8WaztWenxZLstPp5pau8frDtVrNMM5cfTfpMf\n3aunxxbes9d/R09Dp8ebJi09ptFr3jtt2WyrW9wy1Jx132mK+Xq9PotT0iIU19ntLtExa3T47T+q\n6nBaYvsZstZ+cT/LeMnUi0TXffo1s2m8Ws2/OIMWk5Jib5L328rS2t94Sh5TV4ppklpW6PT6rh+P\nNbebTHyas8E081mZy5P2W6OFhjxNTE/hr/LoRO0Kvo9dPqctKzMxEx1la5t3tdnjnMs4noievcrO\nyZjeFF1OSnNV0OG62cn1GWffj7Mz5w05joovzY7xes7TE7w0xrjPeex6Ua+j1UarBFu1o6Wj0lsN\n3JfQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACrU5o0+nvlt92P3BxuM6nxNRGCs+7Tv8\n2hToxm1r3m9utrTvMsonqyt7XTmcja0u3O6FMfi5t/u0/lzdJM81p9O3zdvHTwsUR5+bfPqOfX1h\ndqV+3O7bs1+T31oqmI3TEM4rvCdkDGIIhlFd2daboS0NXG2bD6bufxXU1vlmu/u4us/N0+L1tTSx\nkr9qk7w89j1FNZMV3jxLzvaJ8mer+LSOZqK2xZotbvljfr/89U453rXt9lse081xZtNjx7TGKu0t\nDHlrevSevaN5Y6+tJ8c7VRNMt63n3ub+6/R54rERMztDYy4a5omclYmfxKcenrjtHLvtPrCnVmdb\neFe3JXmjy6eS/DrMuLVYsta9Mdt++6qLxO+0dEc8UmInr18iUfReHcXrqccb9Z27Q61Lb13eJ9nc\n1Z35rTvE9avY4bTkpG8xEfB05vYxqybc07R281naGMREdoT5JQqy9mply7Q3bV3iXG1eXw7TWSka\nc258t7+tpT5/BjT7MfHqndz12Z+M4lMMKyziUJJiN1WSu9fku23RaOgKNJqbaTU1t9yelo+D0cTE\nxEx1iXmM1Nt3W4PqvFweDaffx9vjDbGvxz+TP66QDRiAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAOJxzU73rp6z296zsZMkYsdr2naKxvLyObNOfNfJbvad1dXkaeOdpvsc2yuZVzfbfqybutwu\ns5s8R92J3dvJb3tnO4HSMegtmt3nfZvYp8SZl0z45NfSK7onH1bNcfRFqnUKJr0Y7dVtq7prjEsK\n0XVpEM6028mW20IHK41aPo3J6zs4ODhdcvPnvExFevNXpMOrxi/PlrTee7PLX6Pwa09uaNlKtHg9\ndM3z5d7ReOu02nu0JzZMfblrv5R5uvrcdImZ26T1mYhxs1Os7RH93PZ7axuafNfLitvbaYU3yZYt\nPXs9NwHhui1HBa5LVicsb81onrEuVqNNSuS8Y67dZ6xPZa59Il9uX41vEitImZme3q2Kxbxora0T\nMd/ROSa4Ztkj7c9OafL5LuGYubmyX3iu/TfbdSfVnpvZLT/XZK233+Mbbva1xRXyiPk8pwbH4N6T\nadq5a71n0tD1WDL4tPe6Xr0tDpz8YVnJHWEXYxbqlBedoef4tW0XraO09HdyztSZcbUz43C+ee9b\nSVMaeOfqq7+jGckQ1Yz7+7v2RN/WXPXZPjci2+2yyJaVMuy+uSJlA2d+pNoVRbeDcSxyTE+TDDlt\npdRXLTynrHrDOyiyZeVFnY9TjvXJjres71tG8MnJ4Nqt4tp7T1jrV1
nRL1x2cvABKAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAHJ49qfD09cNZ97JPX5PPw2uI6j6Vrsl/ux7tfk1mWr7dOM8iLdm\nvfebREefRsWldw7SxqNbWbR7lPesrn3Vteo7dYjDpMGCvfbeXQ0uLlxRLRxROfUc34p6fCHYrXlr\nEejqrjY8uzCYW7MZjdVKqK9VlaxCYrsnYExBMRMJRPZA8/xPHtmpP9W2xx76vhWOInvt/C7ike7N\nvwzE9kcapGfhlevTaFbFo8RqJ5vy8/RoW09ek0msxHfp3dzNoLzp4zUmZpMbT8HJyYJi20X2n0lh\nZY1li/RaidBF4w2mK3jrHaFGp1lN+tptPp5IjBkid5mIp16TKu0abBPv33vPlM7z+iPdFNcWXU5I\ntkrNce/b1W5db1nTaf3ax9q0fxDW1ebNk2phty1mOu09VOm8W19orEz23j1TwfSeERFuEYMddptW\nd43dvBn21eKJ75KbW+cf/JcTgMxXTb3nbljz+TpcPmc2uyZO1KRtVtGVdi0bx07qJnllsRO6rNTe\nN4XVamsy8mnvPwc3R2jPwe8TPbdlxXNOPSZfhWWpwO85OFzv57qrODkzeHntSe8Sn6Rv0a3EZ218\n8nXekfr1a0ZLVnqx19dWb6demXybOO7lYMvNMdW9S/VVLo0us7tPHdtUtEwJiZU3jq2Jhham8CVG\nPNODNTJXvWd3qcWSubFXJWd4tG8PK3pPd1OB6veLaa89Y61/u2xfxh5c/rsgNHOAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAANLimq+i6O0xPv392rdeZ4rq/pOqnlnelOkIt5F8Z7Wj27I2I6sb25YY\nV1ImY3dbQ08LRc23vZp2j5OJG+XJWle9p2h6HHtbJXFT7OOIpX+7TxT31j5rycdTh+Dpz+XaG/sw\nw18PHWseULN2trBE9UcrJKBhFU7JAQi0dEomegNDUYovM7x3jb5tO1ZvpbaTLtzRExWfWPJ08kbT\nEx5NXWYYyV5omYtHWJieyeDzuizfRs19Jn6TM7Ru1uMcJxZqTkw+5f4ebqa7SV1MR4tdrx2vEfy1\naxqsNOTLjnLXytVXi3Xj8+nmsxTLM16d5npPyUzpekTtSK+U7vS6vQ/SYmK1vWPS1HOn2dvvvvE/\ntDO5XlcO+LbfHSd/W3o6/BdDOXPTnj3Kz38rS6Wm4FNrRyRzTH3p6RH/AKvR8L4dXSzE3jmtHn5I\nmbfqLV+m4dbLSsZInHjr3iI6zLpYaxS01rHuxHRHiT9mv6s67Vj1aqL6326MrWiYa+/Q54BxPaGe\nXRZpj8MquB4+Xg8zPnB7SX30to379GxpK1xcHiKz5IS8xr8PLPixH2bftLTy05o6dHYyVjLhy0t1\nizjZa3pMVv3iO/qz1G2L+NbSajbNyW7xLsY8kTDz+fJXFqKZN4iZnafi6WHL0iYlStI7OO+7axW2\ncrFl7dW9jvE9ULN+J3ZbdFGOy+AYWpEqN7afNXLj+1Wd23KrJVMvCzseh0+auow1yU7WhY4fCdV4\nOadPefcvPuz6S7jol649Tl4AJVAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAV581NPhtkvO0R+4NPi2\nr8DB4dJ9+/7Q83Po2NTqLanNbLfvPaPSFDHV66sZ5ET0hRknyW2lTtMyouz0c8usx2n7s7vScKwx\nzc1vu/y85p+maJh6Th+SOWeveXR4/wDLm8v+nX5mUWa9bbrInolmu5jdTNkxYFk2Isr3TuCzeGMz\n+THdEyDDJO9Ja823rt2XWnya946pGvktDXta0ztWu/ybvLE9dkcoOf4GbJPWK1j49VmLh9JtE33v\nMevb9G7WsW8l1ccREISophiJ2jpDYpijbaOjOuOJ8ujOdqxsgVcsUjaETYvbaFFrgu5lVsm0yUtu\nryg43H5m+GIj1XcJzePoL4pnrWGtxmfchr8JvfHS1622if3QljzTTLes+qrNjrkiYtCzPMxnm095\nYZJ6boS5teB49Tqscza97VtvWvlv8V/FOF34RrIxTM2xXjelp/eHoeA6XnzReY3ivX/0dfivDcfE\n9HbDbaLx1pb0lOs+jO7K8Lis3cN+0NKcd9PmthzV5clJ2mF9J9GHHVL108dm1SznYr/Ft0tuhLb8\nmNohFbMhLWy0mJ3rPXvDvcO1karBG8/WV6Wj+7kWrvDDBlvpdRGSnbzj1hpjX4z8mOx6UYYstc2O\nuSk71tG7Ns5AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACZ2jeXneJ62dVl5KT9VTt8Z9W9xbWclPo+O\nfft9qfSHEU1pv48ftYST23ZTDC/p0YtlVuvVjMbM5+LCZjYGWGdrTPxiHY4ffaf3cjTxz1v6xMS6\nOlty2iXVj/Dk8n+ndrkhnGRo1v8AFdW3RCrZ5uiYsqrboncSu508yjmZRYQt50TfowYTbYGVrKrT\nuTZjvukQnYhMIGVY2ZxPVWyrHVCWzXpVXkt3TE7Va+W4K7X3jv1auTNy3jdba0RZpamfroQN7Hk3\n6wr1GTaN2OOJiu6Mu98NvgDi8Wy74d/yZ8PiPAiO2zU4nb6qIn1bugjfFE/ASp1ke9u15mbbRDZ1\nMb823kx0Ontn1OOkedoJCvT8I03gaKsz9q/WW+isRWsVjtHRKyrhe0XCfpWL6Vgr9fjjrEfeh5fF\nfeH0V5Dj3DPoOo+k4a/U5J6xH3ZZ7z3228evytOk7NvFbo0cdols47bSybt7HbddHVqUs2aW3Qnq\nxVeu8LILR3SlZw3V/R8nhXn6u0/pLuPMXjeHT4Zruf6jLPvR9mZ8/g1xrvpz+TH7HUAaMAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAABRq9VXSYJyW79qx6yvmdo3l5viGs+maqYrO+OnSvx+KLeLZz2te1rZL2v\ned7WneZYWnZl5K72YV1xEyxmeqJljzIEWlVkszvbZp5soN3h2SJz3pP3odCnuWmPRxuERfJrZmtZ\nmtY96fR28kbX3dXj/wAuTyf6bmK+9YX1s0cNtm3Sd4LFY2K23W1s16StiUJW7bp22RW3RluBuruz\nmWEgrmCGWyNkoExKE1QlP
msqRDKeyBjaejWy2W3ttDUyz1QKslvehVqKTNosyyTvELabXptIJpaP\nB39Ia2mz+JGpr51jdZefDx2hzuHZObNq58poJaGtjxJ2+LoaKP8ADRPo5+T3skx5OhpOmC0fBNQ0\n5yTbn+bt8A0u9raiY6RHLVwY62mI6zMvaaHBGn0mPHt1iN5+aYVsACBXqMFNTgviyxvW0bSsAeE1\nmkvw7V2w5Ote9besJx2er4rw2nEdNNekZa9aW9JeQjnxZLYskTW9Z2mJY7zz26fHrrdpbZsY7NGt\nmxjvso1b9NmUwpx33XRO4K7VUTE1nmrvEx1bVo2VWiJE/XY4frY1WPlt0y17x6/FuPM0m+HJGTHO\n1qu9pNVXVYt46Xj7VfRtnXXL5MfzexsALsgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHM4jxOMFJphmJv529Dq\nZLfjDjPEIx450+K3v2+1MeUOHSOWFc3nJkmZnf4yujpVlqunOeFpV2nctLCZUXRM7MJtsWlRkv3Q\nky5NmpWt9RnrixVm17TtEQnJabXisRMzPSIew9n+CRoccajURvqLx5/chfOest642OGcIpoOG2w7\nROW9d72+LQvXevyejcPUU5M+SvpLeOataraw2a0dLbLqTtK1G3Es4lVWWUSoldFtmcXUbpidgXzK\nGEW3TuCUSncnsDFMMLSms9EC6J6FpVzbZE5ALy0809ZbFr9GtfrEoFMzuuwz0Ueey3HbaBLDXe7i\ntMOfwWnP9I+NZbuttvhs1uBRtXPb4SDm3iIvf57N7Dbl0VrS5+XrltEd+Z1Jx7cNms9N4TURRw3T\n+PrcO3WszEvZOD7P6aYiMlvu16S7y1QAIAABxOPcLnUY/pWCv1tI96I+9DtgmXl68Biy7/NtUu3+\nO8HnFa2s0tfd75KR5fFyMWTdhrPHVnX9R0cd21S3Rzsdm1iuqs256wrmGcT0RYSx5d047X02SMmO\nesd49YRE9WcdSXhZ2O1p89NRji9J+cei1xMc3wXi+KZj1j1dTTaqmor06WjvWW+ddcu8XK8BZmAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAMMmWmKu952UZ9XFZmuP3revlDTtzWnmvO8q3XGmfHb9ZanV3yxtWeWn7y4es\nvPNtDqZJ6Ts5mppvdl/XXRMyfGvSNlu/RVvtOzLfoipLT1VTKbSpvfogRkvtDVyZOhkyvQcA4Dzz\nXV6yvTvTHMfvK+c9U3rkW+zvA/D21urr789cdZ8vi9KDb45rejl8Rry6iJ/FV1HP4vXbBTJEfYt1\n+UpiHM295bXsqrO9l8QkZ0lZEqqLeyBZHZLGvZkhIndADKJ3TMoqWQMZ6pjsxll2jsCLSrmU2lFY\n36gieyu0LJk3jbsga0wdqzK20QpyztQGprL/AFMrOE05NLkt6qdVWZxNrSe5o9vWBLiUjnzXn0vL\nq555dHt8HOwV928/1z/LpzXxbYccRvzTB+jucOwxh0dI22mY3ltIrHLWIjyjZKyoAAAAACJiJjaY\n3iXleM8InR5J1GniZw2n3oj7s/8Ao9Wi9a3rNbRE1mNpifNFnVs65XhcWTdt47bnFuF24dm8TFEz\np7T0/pn0a+HJux1OOrOux08d1ndqY7tillVkzExLOk7yd4YxGwluViJhE45raL0na0dtlWO0+bZr\n1TKi+2zptZGTamT3b/tLacvJjiY3XaTWdYxZZ6/dtPm1zrv1z78fPcbwC7EAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABhkyV\nxUm152iAZWtFazNp2iGhm1Vss8uP3aevnKrNntqLdelI7VRHRnrX/HRjx/tZREVjZXeybW6KbWZt\npCZ6S08tN7Nmbb7zCrJtyoS5145bSx5mWafelr3tsKmS/o08uXyhlly7RPV2+AcBnPNdZrK+53pS\nfP4ytnPVda4y4BwHxOXV6uvu96Unz+MvVxG0bQRG0bR2G0nHLb2gCUDX12LxtFmpHeazt82wT1gH\nmMN4tWs+rcr2aEV8DU5sM/cvO3yb+O0csLUTSdrLphRE8tlkZI7Atr2ZMazDJVKTYSCawi7Ksq7z\n1QERvLK3ZGPrKbyCrbdnMcsbeaa18/RhvvM7oGEwTG0JmYYTIML22a2e28xELM19oURPNO4lOem+\nn3ZY5+prVnMc2GYU4/L4A0a15cNf6rz/AC6fC6+NxCPOuOu/5tHJTbHj+F5/l1+BYumXJMd9o3/d\nMRXYASgAAAAAAABhlxUz4rY8lYtS0bTEvH8R4ffhmo6bzhtPu29Pg9mq1Gnx6rDbFmrzVsizq2df\nzXkMWTeIbNL7tbXaHLwzUctvexWn3bmPL8WFnHVL326VZ91MfFVjvvVlz79kLrcf2m7j7bNHH3bl\nJ2SirLQoy4t1++7G0dBC/RanxI8PJPv18/WG241+alovSdrV6w6mDNGfFF4/OPSW2b1zeTPL1aAs\nzAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAVZ9RXBTe3WZ7R6iZOpzZq4ac1p+UermZMl89+a/byj0Ra9815ted59PQ32hlrXXRjH\nDpCLX6ML5NlNsm/ZRqstfdXzbsZt06sLZNvNB1Za8RDWyZdo7q8udq5Mu/mIMt4md2lmy7JzZuWJ\ndHgfBL8RvGo1MTXTxPSPx/8AstJ1XWpIs4BwSdbeNVqq/URPu0n73/s9hEREbRG0QUpWlYrWIisR\ntER5JbSccur2gCUAAAAPM8Sry8Uyz67fwuxbzVPGsE49XGbvF42V4M0TEL33ERnktsxpk3sumK2j\nadmFdPFZ33VS2Mdui2J3UU6LYlFSsN2O5NkCyJ6K7T1TEsbAsxdpReerKkTFGMxvYEz0rsqtbbpC\nb2VT1QEzuwtbaGUxspuJU3neWdKoiu8rq12gCI92YatLcublnzbEz1aOptyZqTuDHLfxN6R0+t5X\nqdJhjBp6UiPLeXl9NSMnEKxHa1+bb8nrlvxUAAAAAAAAAAABTqtNj1eC2LLXeto/R43VabJw/VTh\nydY+7b1h7ho8V4dXiGlmvbJXrS3xRZ1fGv5rzeHN02bEW3cys3xZJx5ImtqztMS3MeTeGFjqlb2O\n8btql3NpbZtYsnSBLeiWfdTjtutid+ghherHS5p0+f3vsX6T8Fkw181d4lMvEWdnHaGnw/UeNh5L\nT7+PpPxbjdyWcvAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAo1Oprgr63ntAmTqdRqK4K9etp7Q5d7Wy2m953lNrWyWm953mVd77R0\nZa1104xxlN9lV8qnJl2a9s3xUXX2ybsJyRDWtl3YWydEC+2VRkzeW6q+T4tbJm+KRdfK1cmWZnlr\nvNp7RC/R6HU8SycmCk7ed57Q9ZwvgOn4fEXtHi5/O9o7fJaZ6z1uRyOEezVstq6jiEbV71xevzer\nrWtKxWsRFY6REeSRrJxz22gCUAAAAAANbX6aNVpL0npMRvWfSXlKamsRMVvXm+EvZXjmpaPWHzfL\noNRjzXicfWJ8phfPxFejx72x7xMzK+sXiNoiXlq+Pi6fWV/VfTNqfLJl/WTg9Pji8R70LqvMV1Gq\nj/zcv6yz+lanzzZP1lWpelTET6S81Gp1P/Gyf90s412rjtnyfqql6asREdWM9+jz9eJ6yP8Az7uh\nodZqMt458tpB1JvEViI3/RhzRt13/R1MNaziiZiJn5K9ZNceKZiIiQcu/WekT+iYrWI3lzdTrs+8\n8uW0fJzcur1Np/zsn6g79phVaIeetqNR/wAXJ/3SwnUaj/i5P+6UD0ldonum161h5mNRqP8Ai5P1\nlNtRqJjacuT9Qd22WN5aGeZyZd/KHJy59RHbLf8AVq31Gp/4uT9ZEvS8Lr/vSs2npzRtL1z53wK+\noza/HW2XJNd99pmX0Rb8VAAAAAAAAAAAAAAcHj/C5yV+l4I9+v24jzj1cLFk8nu5jeNpeW41wmdL\nknU6ev1Vp96sfdn/ANFdTrXG+eq1q5F2LLtbZoY8m8d11bbSydErsYsm+zZrO/zcnBm226uhiyRK\nEtrvCrJDOJTeu8A1MWX6Lqq5N/dnpb5O5ExMbx2cPNTeJb/DM/iYPDtPvY+nzhri/jDy5/W6AuwA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAa2p1UYo5adbz+xbxMlvqJ1OqjDHLXree0ejmzNrWm953tPmTPWbWneZ7yoy5YhjrXXTjH8s75N\nmtkyxt0VZM2/m175N1V03yTKubMLXVXybeYLLX2VXy7eam+b0bOg4VquJW+rry4/O9uyZOq3UjVm\n9r25axMzPaIdvhns1kzbZddM0p5Y47z8/R2+HcF03Doi1a8+Xzvbv+TotJnjDXkt+K8ODHp8cY8N\nIpSO0RCwF2YAAAAAAAAACvUZYw6fJkntWN3k8dfHz2vLucdz8mkjFE9bz1+UOZosX1UzPm0nqI/W\nMYo9FlcPNklfFGeH/NshLGun+Cz6PtHZtVZWlRLS+jxPkRpIn7rdoupHTdA5s6SI+7H6Mfo+32Y2\n+To3neSIiZ7A0IjPXpXLePlMotGW3272t85datKzHZjbTVnsDj+FG/2Y/RlGP4R+jo20u7H6N1Ql\no+H8I/REY957R+jpfReiK6eOYHLtj2tttH6KrY/6Y/R2c+kjeJiFVtLG24hxpw7/AHY/RRkw9O37\nO99Hrt1YX0tfOBLjcGp4XF8c+u8fs9c4dcVcGemSI61nd3IneN1orQAAAAAAAAAAAAABFqxes1tE\nTE9JiUgPKcX4RbRXnNgiZwWnrH4XPi28PdXpW9JraImsxtMS8pxXhF9DecuGJtgmf+1TWW2N/la1\nL7N7T5e3Vy6W3hsYcvLbqzbO9jvvCzvDR0+XeO7crO6FmGSvRThy/RtVXJ92elvk2rRvDUzU7pl4\nizsd2J3jeBpcNz+Lg5LT7+Pp+Xk3W7js5eAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADs0NTrN96Yp6edkW8Wzm6+LNTq4pvTHO9vOfRoWtt\n1mes95YWvs1s2fZldddOczLPLn2ju0MmebT3YZc2/mpm3qqllN1drsbZIhr3yzvtHf4AsvlYYseb\nV5Yx4KTe0+UQ6nDvZ3UazbJqd8OKeu33peq0eh0+hxcmnxxWPOfOfm0mP+steT/ji8N9mKY9suum\nL37+HHaPm9DSlaVitKxWsdohI0Y22gAgAAAAAAAAAABXnyRhw3yT92Nwef4xm8bVzET0rPJH5d12\nCvLhho3rN9RWs9Z23n5y6O21YhrVYbdGOCfrrLPJRpv863zVS6FS09SvZj3lVZZRdPSqmnSWdrIE\nebOkK4ldTsgW1WKqd1oMZhEVZyRAImOjGI6rJ7IiATNd46qL02bHkiaxaoNGY2n4ImPgtyV2n0Vo\nGvlx7x2beiyTk08RPevSVUxux00+Fn2n7N+n5rRFb4AAAAAAAAAAAAAAACLVres1tETWekxKQHlu\nL8InR2nPp43wz3j8P/s5dLveWrFqzW0bxPeJeV4xwmdFec+CJnDM9Y/CrY1xv8qvTZ+WYdbDk5oh\n5zHk283U0eo3jaZZ2N5XYjrCnLSJhOK+8d1kxvCqzSwZvousrb7k9LfJ3nB1OLeJdLhufx9LEWn3\n6e7LXN9Ofy5/W4AuxAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAETaKxMzO0Qi9646Ta07RDmZ9VbPbaOlI7Qi3i+c3TPUaqcu9adKfy0722ZXvFa9\nXO1OrjrESxt66ZJmcjPUanlidmhkzTZVfLN5VWvsC2b7R3U3yqrZZtO1esz2h2+F+zWTUcuXXTNM\nfeKR3n5+iZLVbqRzNJo9TxHLyaekz62ntD1fDOA6fQbZL7Zc/wCKY6R8odLBgxabFGPDSKUjyiFj\nSZkYa3aALKAAAAAAAAAAAAAADQ4pl2pTFH3p3n5Q33E12Tn1eSfKscsLZ+orS00eJqbW+Lfnu1tF\nXaJnZsz3WpCfsyp00fWSvmPdVYOmSUDd8kR3InoQosy7JmUX7MdwZ17ro7KKT1XRPRAsrO0rYndr\n79V1ZBaQiJ6JgCSIJASwrO07MpV2nqBlrv1a1o2bf2qtfLXaQUTO0sb05o3jv3ZXhjS20xEphW5h\nyeJjjf7UdJWNKLziyRePsz0lux1SgAQAAAAAAAAAAAAAADG9K5KTS8Rato2mJZAPIcU4ZbQZuekT\nOC3afT4NXFkmlntc2GmoxWx5K71tG0vHa/RX0GpmlutJ61t6wrY2xr8dXS5uesN+tt4ef0eaa223\n2dnHk3juyreM81OaFGiy/RtZET9jJ7s/2bdutd2jqKeic3iNTsd8a2h1H0jTVtP2o6W+bZbOO+gA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABje9cdJt\nadohGTLXFTmvO0fy52bJfU23t0pHaqLeL5xdK9Rnvqb+cUjtCi94xxvK3JetKuHrdZvaa1ljb10y\ncnIs1Wt3naJc++TmVWvMz1YWybfMGdsm3
eWek0mo4jm8PT0mfW3lDf4V7P5tdMZdRviwfvZ6/TaX\nDpMMYsFIpWPTzXmf+steT8jn8L4Dp+HxF77Zc/4pjpHydYGjC3oAAAAAAAAAAAAAAAAADG9opS1p\n7RG7zszN6WtPe0zLua+3Joss/wBOzhzG2OsL5+IrY09dsSyYRijbHEMvOChb7KjF0yS2LQ169Mso\nS24noyrPVXWejNVKbTuw3T3REdQWU6LYlVvsyiUDPfqupPRr79VuOQX1lZEqoZxIMksd0gT2VT0l\nbPZVbuCaW8i8bwr32WxbcGnkjaZa9p2ndv5qbw5+aNugLItF6TEtvTX5sMb969HMpfazc0d9stqe\nvVZDdAQAAAAAAAAAAAAAAAADV1+iprtPOO/2u9bektoB4TJTJpNRbHkja1Z6uto8viVht+0HDvpG\nH6Tjj6zHHvbecONw7Ltfkmeqmo6Ma69DXbbZTkr1mGWO3RneOaGbZRoM30fVzSelMnT83aef1FZ7\nx3h1tBqfpGnjmn369LNc3sc3kzy9bQCzIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAa+q1dNNXr7157VhGp1Xh70x+9f9ocy283m1p5rz3mVbrjXHjt91lz\n5c9+fJ1nyjyhdM8lZlOOIiqrUXikd+kMreunnI5XEdX4dZiZcG+XmtNl/F83PeeWWHDOGanieSKY\nq+5H2rz2hMzWd1Iqx1yajJXHhrNrW6REeb1nCPZumn2z62Ivl7xTyr/6uhwzhGn4Zj2xxzZJ+1kn\nvLoNJnjHW7TbbsAszAAAAAAAAAAAAAAAAAAAAaPFrbaSK/itEOXt0rDf4xb/ACa/GZacRvaF58Q2\nIjasQnzPIhCU92tMbZGzHmotG10C6nZkwpPRmipIllEbMIZIE7solgmJBnCyk9VMM6z1BtVllEqK\nz0WRILYlluriWcSDJVbusV27gwInaSWM9ECyZ3hqamnSWxFmOSOaqRx725bNnSZNs9J+OynVY+WZ\nYYr7TE+nVaIr0Ais81Yn1hKAAAAAAAAAAAAAAAAAABExvG09peU4nov9n66L0j6q/WPg9Y1OJaON\nZpL0+9HWs/EWzeVz9PbmrEtnyc3h9reHy26TWdnSr2YX6657ijLXpLX0+onSamL/AHJ6W+Tbv2aW\nekTv16JzeI1Ox6KJiYiY7Slz+E6jxdN4dp3vj6fl5Og2clnKACAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACZ2jeQRMxEbzO0Q08uqtkma4ulfO3r8lefUePMxWf\ncjy9WvlzVxV6T1Z61/x0Y8f7Wc7Ur1lqVy+LqOWJ2hp6rXddon5rOF1tfmz5OkT0qzb8dWbxjp1c\nbiuuilJ5Z6r+IcQrixzEy8zl1E6rNt1tMztFY81sztU1eRucN4ffi2p5esRM72n0h7rS6XFo8FcO\nCkVpX082nwXh3+z9FWLxHi36328vg6TZyW9ABAAAAAAAAAAAAAAAAAAAAAADj8Unm1tK/hqppHvw\ny1k8/EMk+m0GOPeafiFpCZYwolnXspvHvLa9mF46gmnZmwozRUiUCBKYYsoBLOFbKAX0llEqqyzi\nQXRLOJVRLOOwLIljZMEgrlhKyYYTAK5nZPN0RZjugUanHzVlz6xtLq361c+9eXItPpXX0dubTU+E\nbL2lw2++O1fSW6m/VYAISAAAAAAAAAAAAAAAAAp1GbwcfTreelYEydcuMcRrM/L9nnlsV6wqpi2r\ntv133mfWVkRyRtEdGFva7MzkYZNoamWN4bV4mYa9qztKIujhVppxGI8r1mJegeZpknBqKZY+7L0t\nLRekWrO8TG8Ns/HJ5ZypAWZAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAADS12fp4VJ6z9qVuq1HgUiI+3bpDl589cOKZmevqprXPTbx477rDJlrhr1nq4+s182tMRP\nRqaziXiZJrWekNG17ZbxWJ336M5LXRbI3dLTJrs07RMY6fan1dHLrowY+X7MVjt6N3R6Kul0EbWm\ns7bz8Z+LnabQX43r7Y53php/mXj+Dnv0f1JO1x/8ZxbUzj02O15mfLtD13AvZqnDds+pmMmo26el\nXX0Wh0/D8EYtNjilY7+s/NstpOOTW7QBKgAAAAAAAAAAAAAAAAAAAAAADG88tLW9I3BwJtz6nNf1\nvK/DHVqYJ3pzT5y3MPZeojOWMQylEKpTVjZnDCwkqzYQyRRICATCITAJZQxhMAshnEq4ZQC2srKq\nqrIBZCWNZZgwswmFloVyCu0dFcx1WyrtCBhv5NTPHXds2U5o3hIz4ffbPt+KHUcTSW5c9Jme0u2v\nVYAKpAAAAAAAAAAAAAAAAYZctcVOa35R6tLrltN795/YvknNqrfhpPLH92V5isd9mWq6fHjk6rn0\nZxG8KK5Jm/wbVZiYZtqrmkqL023bkxvCiY3lJHNyRG81mHS4Rn5sNsNp64+3yaWaNrzOzHBl+i6q\nmT7s9J+S+ay8mex6EIneN47SNXKAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAImYiJme0JafEs3h6fkidrZOn5eaLeJk7eOdm1Hi2vmtPTry/CHmOJcUvmvOPF1n09Pm\n6HF9ZGm01qxO3R5vSY7XwzmzTy47zzTEd7en5Mfvt2/PURWdo3tvPrPlKymbktFqTtMTvHzbOLDG\nf63JXbFX7FdnoODcDprZpq9TjiMMTvSn4vj8l5fxnrk91saPSa7i2hpOfbTVt5x1m0fLydzR6PDo\ndPGHBXasd585n1lsRERG0dIF5OOe6tAEqgAAAAAAAAAAAAAAAAAAAAAAADX11+TRZrf0y2Gjxe22\ngtH4piP3TPpXKwxtjhuYo9xq442iIblI2pC1RET2ILd9kxCqRjZmwlCSEohIJAQAAJZISDKGUd2M\nMoBnVbVVCyAWVWeSuqyOwIlXZZKue4MJV2WWYT2QKbKL9YlfdRdIo35b7/Hd3KTzUrPrDh27uxpb\nc2mpPwX/ABX9XAKpAAAAAAAAAAAAAACekTIp1eTwtJmv+GkyJn1oafeazbfpMzLR4jq/o8b823zX\n6XNF8ERCvTcNpxLV5LauvPhx9Irv3lhztdtv8TtaWLicXrt03jzjzb2k1nid56ty3s/w+a7Uwzjn\n1raejlarhmbhl/FpbxMO/fzj5p/ixSeXOvTtRfeI280ZI26tfDm3pWe63LaZx7qtGvniJ6tPLvOK\nfOa9WzbJvTbza02jl3n5SSljscK1MajSxWZ96nSW88xw/VfQ9XMT9nfa3yemid43jtLeXsce88qQ\nEqAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADia3UTm1l4j7OP3Y/u\n
7Vp2rM+kPJW1PhYcmS0+9MzKm/jbwz31weMzbV8UppazPL9q0/BF4rk1GLDSNqxPWPhCnHmnNrtT\nqPKteWPm6U6OdHaZvO+SaRNvhv12Ub/q3FhtrNVj0uKOt56z6R5y9zix1w4qY6RtWsREOJ7L6OKa\nS2rvX6zNM7T6Vh3mmZyOfya7eACzIAAAAAAAAAAAAAAAAAAAAAAAAAAczjVvqMVfW/8AZ03I41bf\nLp6/OVs/UVrY47NyOzUxd4bUJpEbb3Z7IiOrKIVSjZhMLJYyhKIgmGUQSDESIEbJEgQmCITEAmGU\nIiGUAyhZVhDOoM4Wx2VQtqBKuyyWEgqlhKyyuyBVaGtkbNmvk7A15l1eH2300R6TMORPSXT4ZO+O\n8fFefEX63gEAAAAAAAAAAAAAAAq1WPxdLlp+Kkx+y1Fvsz8gjhaDauGK8sx07y3OE3m1tT6RaP4c\nvU6yMNKUx73zT0ilY3l2eF6a+m0kRl/zbzz3+Ez5M8z26fJruW6wzYq5sV8d43raNpZjRzPPaTmx\n5b6bJ9rHO3zb2WJ8GWPEscY9bgzxH2t62n19GWW0eHOzHU5XbjXZ1x8WTnz2iZ7S2M1IjH2+LX0V\nKTqs8zO9ot0j8nUthi1J3UaOFMTfLFo6xMbS9BwHWTqdHOO8+/hnln5eTjYMFo1WTH5VnePzXcIm\n2k4zlpPSmXy/hfF5eMfJns69OA2cgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAADG/2LfJ874rW845mubliY7bPoto5qzHrDz0+yePNF41OotaJ7RWNtpV1OtfHqZ715fhu\nj8adNpcVfeyzE2/vLuanhOu1nEctIxTTFa/+ZPbZ3eHcF0vDbTfFE2yzG03t32+DokynXl9+leDB\nTTYKYccbUpWIhYCzEAAAAAAAAAAAAAAAAAAAAAAAAAAAAcXjE/4zDH9M/wAu04XF5/3jj/0f3Wz9\nRUYmzDWxS2I7FSyjuzY1ZKpRKEygEwiWUIkGIk2QJNhKQhMIhkCYZQxhlAMoZwwZwgWQshVCyATL\nCWc9ldpBhZXLOVdpQK7NfJPRdaWvknoDVvPvOnwuel4+TlXn3nS4VPvXj4QtEV0wAAAAAAAAAAAA\nAAAAAVV02CmTxK4qRf8AFFeq0AAAanEsfPpZmO9Ji0NDLfkwdOsulrumiyzHlVzJrz4Ovoy26vB8\ncTBa9NffLtMY77Rv8Yegx5ImkKdJoY1HC81Y+3OSbVn0mGGkmbY45u6tnrrTOu2xGO0RxCd+nNVj\nqKxTV1vH2pjaGtnyzXXYdo96ZmGXEMk15b7/AGZiVerWPTYckZcNbx5wzc7hGbnxXxzPWk7x8pdF\n0S9jh1OXgAlUAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAcPjEf4/FP9H93ccXjMf4vDP9Mx+62fqKrx+S+GvibEFSsqyYwlVK\nZYsmIMoRKYJQIPIEiQ2ATCUQygCGUIhMAyhnDCGUIFkLIV1ZxIMpVWWSrsCuyqyyyq09ECq8tfJK\n66jJ2Bp5J6upwn7dv9Lk5J951uE/av8AJaIrqAAAAAAAAAAAAAAAAAAAAAAq1Mc2myxPnWf4cmtu\nXT9fR0tffk0WSe28bfq5Wbamm3326MtunwfK6PCv/AxPraZ/dz9PO97/AOqf5dHhdZrw7Dv3mOb9\nXOxRFM+avpe38mvkPHf/AFWlrKba7Tzt99ZxKkfR7euyNXMTrtPHfa0z+zPiM/UR8Zj+Wbdu8HpN\nM2bfzrV13M4dO2pyR61dNvj44/J/oAWZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADj8bj63BPzdhyeNx0wz8ZWz9RWri7Nmv\nVrYu0NmqaRZHZlDGGSiwxZSgCEkCBCQSCQBMJRCYgEsoYx3Z17AlMIhlCBnDOGEM4AlhZZKq4KrK\n7LLKrIFN2vdfZReAaObu6/CO9vk5OePR1uEd7fJeIrqAIAAAAAAAAAAAAAAAAAAAAGtxCk5NFliI\n3mI32+XVyNTyZOHTee946PQKPoeDffw4777eW/yVs60xv+ZxOnr4Okx1t05KRv8Ao41Z5q3yed5m\nXY1szXRZ5jvFJ/hxItP0aOSN9q7yrtr4f2tHFM5+KT16Yq/vK/iGSbXw4vO14UcPx5MGfNbPG18m\n1oj4THRsTw7VanPXVYpi3gzMcnrvCnG11JOupwuN8+a3pEQ6jT4divjxWnJExa09pbjbM5HHu90A\nJUAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAHM41H1GOf6nTc/jEf4Ws+lls/UX45uGekNujTwdm5RNIthKIZKLDFlsiQIShIC\nEgCUJ7AmGTGO7IDzZQhMSDJMMYZQgZwzhhDOATuqssmVdgVWVWWyqtCBTeVF19lF+wNLNG7q8I+9\n8nLyupwnt+S8RXUAQAAAAAAAAAAAAAAAAAAAAAAItWL1mto3iY2lyrcLyUxzix2ia2nvPeK+jrCL\nOrTVnxpanhuPPemSs8l6RtE7dJj0ldpNP9GwRSZ3neZmV4cR/Vs4AJQAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANHi1d9H\nM+kt5ra+vPoskfDdOfqK4mn7Q3aNHBPZu0W0RdDOGFWcKLCJZeTGQQlCQSgASBsCYZQxhlAJTAmA\nTsmAgGcM4YQyjsgRLC3VnaVcgwsrt3Z2V2QK7tbJ1bN5a9waeWO7p8Knt8nNyebpcK8vkvlFdQBA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAK9RXmwZI+ErEWjesx6wQeZwejeo0cccuW8\nelpblJaaRGxVnCuss4ZrMvJEgCAASISCQIBlCYYpieoM0wx8k7gzIRueYM4Z79FcSy3QEsLJmWFp\nBjaVVpZWlXMoGNmvkXXlr3kGtknu6XCf7OXkl1OEdl8orqgIAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAHmskcmtzV/rls0U62OXiWX4zErcc9GmkRfWVkSqqziWayxCPIANwBIhIJSxS\nCRG6dwZwlhEs4BluMdzfqgZxLLdXuy3AmVdpZTKuZBjaV
VpWWV2QlhZRdfZRcGpl7urwfrzfJy8r\nrcH61vPyWitdMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHA4nHLxKZ9awnH2ZcY\njbW459aq8fZpfiI2IZwrqzhmsz3Ebm4JN0AMhCQSIASndiAziWUSriWcAyRujc80DM3RCfIETLCW\nUsZEsJYSslXZAwlTddPZTkBp5e7r8Gj6rJPxhx8k9Xa4PG2C8/FaK10QAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAcfjcbZMFvnDWx9m5x2PqcNvS+zSxT7sNPxH62YZQwqzhRZO6UCB\nKUAJTux3SDIRuAncQAmJZRLBMSgZ7iIAZRKd2DICUSlAljLCYWMLIFVukNfI2bNbIDTyT7zu8Ijb\nSz/qcG/2nf4T/wCE/wD2WnxWt4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHL9oL\n+Hw2cm28VvEuPptfgyVj6yIn0no7/FtJfW8NzYMe3PaPd39d3iMug1WktNc2C9dvPbeP1aZ9xF+v\nT471tHu2iflK2HkqWmvaZj5Surqc9Ps5bx+alTHqYHm68S1Vf/NmfnC2vGNTXvyT84Ql6A3cSvHM\nsfaxVn5Ssrxyv3sM/lKB1xza8bwT3pePyWV4tpZ+/MfOEjfGrXiGlt2zV/PotrqcN/s5aT/+wLRj\nFontMSlAlKEgndO6IAZQljDIEgeQljLCzOVdkCu/SGrkbF56NPNeKxMzMRHxENe0+89DwuNtHHzl\n5PJr8NcnLW3Pbf7r1nCZm2gpae8zMrz4i/W6AgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAETETG0xukB4HVaeMHEtRi26RedvkyjBSfX9W77QYvC4xz7dMlYlrU7M929dWJLFc6aPK0q\n7YLxPS0S22FlP6q38Zac0yR92s/KVc3tHfFf8tpbcsLRvB/dR/8ALLVnU0r9uL1+dZI1mnmdvGpv\n6TOy6ym+Oto2tWJ+cJ/tW+KLK5KW+zes/KU7tG+h01p64qx8Y6NXNo6Y+uPJlp8rLf0rfG7MXtHa\n0x8pZxqs9e2a8f8A7Oj7HaTHn0+f6RWM23LETfr6vRW4PoL99NT8ui7F4+vEdXXtnt+fVbXjGsr/\nAOZE/OsPS29nuH27YrV+VpeV9pdPXhOtw49NG9Mld55+vXcTPd42I47qo7xSfyWV9oM8d8VJ/VxM\nd8l46xWF9cV7en6o/qLfxp2I9ob+eCv/AHMo9op89P8A/wBORGmyT5R+qfo2X8P7n9Q/jTsx7RR5\n6ef+4/8AuHftg/8A6cWcOSO9J/WEbWr3pY7Efzp2Lcfv5YK/9zWy8d1E/ZpSv5Oba1/+Hb9lc+LP\nbFt87I7E/wAabWbiurvEx4nL/pjZzc2bJkn372t85ZXx55/BX85lucC0vPxnTxlnnjm32mOiZqUu\nLJ2p4TwnVavNWaYbRTfre0bQ99pcH0bT0xb78vmtiIiNojaErMwAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAHnfarF7umzRHaZrLjYrdIen9ocPi8JyTt1xzF4eUw23rCm3R4r6bMy\nwt6kdTaWLdjswmNoZontsCm0K5XWjopnuDC0dGpqG5bs08/daKV672MjbSaif6oh6Z5f2LtvptRX\n0tEvUN3Jfo8f7cYve0eX4zV7B5z20xc/C8eSPuZIRficfXlcPaG7ino08HWIbePpLF2NuiyOyrHK\n3fZFSwuovHVfaVF4QK5YWTM9UT0EKry6Ps1Tn4zjn8NZn9nOtLseydObiWW34cf918fWfk+PYANn\nKAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAq1WKM+ly4p+/WYeBxTNd6zG0xO0\nvobw3FcP0bi2em20Tbmj5Srr418V9sa2Z7qKyzi07MXUylhaU7yjqhLCeiq3ddaFNxFYW7NLNG8t\nzya+WO6Va9J7FW66mvwidnrXiPY3Ny8RyUn71Jj9Ht3RPjk19HK9pMHj8D1ER3rHN+jqqtTjjNps\nuOe16zAifXzfTz7kNyndpYazS9qT0mszDdoxrsi6m8LazMq6zDOsq1ZEyrt1WWlXaUCqyq0rbKbi\nFdp6PReyFd8uqv8ACsfy83aXrPZHHto89/xX2/SP/dpj6y8vx6EBq5gAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAB5n2q03LfDqqx39y39npmlxbS/TOG5se29tuavzgWzeV4mtui2\nO3RRSY2hdVhqO2MvI36iu9lUsrSrvDHn6spnmSiq5jooyV6tq1VV69RC32byTh43h8otMx+r6I+Z\naK/g8TwX7bXh9Mid4iW+fjl8n1ICWb57xLBOm4zqse20Tbmj8+qKdnS9q8PhcTw5tumSm0/OHMxz\n0Za+uzx3sX1t0Zxurr1ZxvspWiZYWZbsbT0QK7KLrZVZJFaqt5vbezNOTg9J/FaZeJns93wCvLwb\nT/GJn92uGHldIBowAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADuAPA67F9H4l\nqMW20VvO3yRWW97T4fC4rXJHSMtI/WGhVlue3b473K2KzMML4+62tujG9pnozXaOSOVFMnVbmq1t\ntrJRW5E7wwvUxTvCyY6CHOt7moxz6Wh9PxTzYaT61h8x1MbZK/OH0zTf+Fxf6I/htj45vL9WgLMn\nmvbPFvocGWO9L7fq85p5maw9d7VYvE4JkmPu2if3eW0+PasdFNOnxfF1Y2hlykRsmY+LJ0MZjZXa\neq2eyi8oQTO0
KLdZWzPRjWu6VaqtHR73g0bcI0sf0Q8Nkq93wqNuFaWP+XDTDDytwBowAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAef9q8HNpcGaI60vtPyl56k9Iew49j8ThGe\nPwxFv0l4zH2U26fDfTYiyJljvsjf4sm6vJ1hrXjq2MkqLdZEVbgbMx0auGdmzNt6iHN1Ub5af6of\nTdPG2nxx6Vj+HzaaTm1+nx/iyVj930ysbViPRrj45vL9SAuyc7j1efguqj+jd4/T33rD3HEcPj8O\n1GP8WOY/Z4TTT7sKadHhbcsZnaCJ3TPZk6VdrKbTutmP0U2nqgrGOsr8deiuI2X09EqKM1dt3uuG\nf/jdN/06/wAPE546S9rwud+Gaaf+XH8NMMPK2wGjAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAABrcRp4nDtRWPPHP8PCYusPoWSvNjtX1iYfPuWaXtX8MzCuvjfw32siu8ptXoxi\n0wy5t4YulReqmazu2skbquURWFInddM7VYRGyL291KFnCcfj8e0le/Lbmn8n0N4b2Ur4nHLWmPsY\n5e5a5+OXyXugBZmiY3iY9Xz7NjnTa3Ph/BeYj5PoTxftFg8Hjk2iOmWkW/Psrr418V5WrWd2faFc\nV2jdnEMXWxntupmN7NiYU27iWML6dVMVnddjgVqMsdHr+CW5uE6f4Rt+7yuSsTDv+zWXn0WTHP3L\n/tK+GHl+O0A1c4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA8Dn93W56/wDM\nt/L3z59qp24jn+OS38lnpr4r7ZxHQ2TEstt3PXUrt27K57rr1VT0BjKnJPRbMqMs7QlV2fYvHvrd\nVknyrEfu9m8f7FZI8fVU85iJewbT45NfQBKo817W4eulzxHaZrL0rje09ItwqbfhtBVs3leai8RD\nKLw1sduesL606dWFdsZT1jdhNeq6K9DlhCVUU6s4jZnt1YzAhnM71dH2bycmszY/K1d/0c6OzY4R\nfwuK4p8rTstn6z8k7HrwGzkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHz3\nVxvr80/8y38voTwGpj/F5/8AqT/JfjTx/WVeyY6FPspc9dZPVXaOq2WEwIUTVRmjo2rNfLHRI3vZ\nDJycXtX8dZh7t879nsnhcbwz23tt+r6I2nxyb+gCVBzuPY/E4PqI9K7ui19fTxNBnp60n+Aj5/pJ\n3jZu1aOnnltMNussdfXbm+l3ZM9URHREdZVXTuT1Nk7boQiOkJw28PU47/htEp5eivJPLMTCZ9Vv\nx7mJ3iJ9UqNHk8XR4b+tIXuhxAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD\nweqjbWZ4/wCZP8vePCaz/wDIaiP+Zb+UX408f0r9lOxWOifJhXWjfyYWllPRXYQxnrCrJHRd3YZI\n6A1NJecHEsN/S0T+76bE7xE+r5dk93LW3pL6ZpMni6PDf8VIn9m2fjm8s9rgFmQxvHNS0esbMiew\nPnHLyai9fS0w2aNfUTtrs3+uf5bGPqy068fF227KtSsdFlKqNGMV6myyY6sbdIQI8tlOWOi6Jhhk\nj3RD0vA8nicMx9etZmHRcT2Zyb6XNT8N9/2dt0T449T2AJVAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAHhdfG3E9TH9cvdPEcXjk4zqI/q3L8aeP6xr2TsxpLOekMK6mFo6qpXSrm\nOqBixvHSVmzC4OfqK7S9/wAByeLwbTW9K7fo8Fqo6Paeyl+fglI/Da0NcMPK7QC7AAB8313TiOf/\nAKk/y2MHWrX4jG3E9R/1Lfyv0/aFNOrHxuU7LI7MMayGTVlHWUXhNe6Z6wIUsb9d1m20q7dkDpez\nN9tRqKT5xEvRvKez9+Xis1/FSYerb5+OTyf6AFlAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAB43j9eXjN/jWJ/Z7J5L2mry8Upb8VIF8f6aGOey2eynHvOy7bowrrYSxZSwQJ2YXZ\n92N4BoanrEvVexmTm4blr+HJ/aHltRHSXofYm/1Wrp5RaJaYY+X49WA0c4AD51xONuKan/qW/lbp\n+0MOLRtxbU/9SU4J7KadWPjep2WQrr2WRPRk1TvsndXMpiRCb9FNu0rbTuqvKBscCjfi9PhWZeue\nV9n434rafTHL1TfPxy+T/QAszAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHmv\navHtfTZfnV6VxPajHzcNrf8ABeJFs/XnMcr4no18c+6vr2YadkY2YM57sEDLyY37Mo7MMnYGlqO0\nvQ+xNfqNVb1tEfs87qZ2rL0/sVX/AHdnt65P7Q0wx8vx6UBo5wAHz/jUbcX1PT78qtO2vaCnJxjP\n8Zif2amnnspp04+OjWejKJ6MKdmcMmyJn4m5ZHzEVPMwtJv0VZLbQDqezcb8RzT6Y/7vUPM+ytZt\nn1OTyiIh6Ztn45N/6AFlAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABocbxeLw\nnUR5xXm/Rvq8+OMuDJjntaswEeBxT0bNZ6NatZpNqz3rO0rqsdO3PxlaWEMpY+aqWXkryT0ZT2V3\n7A0dVPuy9f7G124NM/iyT/Z4zWT7sw957MYfB4Fp4/FE2/WWmGHldcBow
AAeM9qKcvFeb8VIly9P\n0nq7ntbTbVYL+tJj93CwT76unR4/jo0nozhhTsy3Y1sWljM9Ce7HyQIm3RRlttVbaWrnt0Sh6n2U\nx8vD8mSfv3/h3XN4Bi8Lg2nj8Uc36y6TeOPXugCUAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAPD8RxeBxXUU26Tbmj8+quro+02Lw+I4ssdslNvzhzazvDPbq8d7GW7Dfqz2VzG\n0s2qd+iu/Zn5Ksk9BVztX1mI8930zh2LwOHabH+HHWP2fNYp4+vwYvxXiP3fUqxtWIjyjZtj45/L\nfaQFmQADzftfj3w6fJ6WmHmsP23rvaqnNwqLfhvEvIYZ+sV038bo0noy36MK9oZQxrdMyrlnMbMZ\nQKrS1M07zEestq/RRjr4utwY/wAV4j91p9V18fQdJj8LR4ccfdpEfsuREbREJbuMAAAAAAAAAAAA\nBAJAAAAEAJEAJQAJQAJEAJQAJQAJEACUJAQlAJEAJQAJQJAAAEAJEAJBAAAJAABAJEJAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABwvanDzaPFmjvjv8A\ntLztJ3h7HjGHx+FainnFeaPnHV4vFbeIU038VbHeGF+kso7Mb9mTdhKnLK3dRm7SIrHhGPxeP6Sv\n9cT/AHfSnz72Zx+J7Q45/BWZ/Z9BbZ+OXyfQBZQABzeP4/E4NqI9Ii36S8Ng/wAx9C4jTxOH6ivr\njn+Hz3B/mQi/GvjdCnWNlsdI2V07LIlg6USrt2ZzZXMoFV+zPhGLxeOaavpbm/RVltEN72Yx+Jxm\nb7dKUmf7L5+s9/HtRA2cqRACRACRACRACUAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAACQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQCQQCRACRACRCQBCQBCQB\nACRACRACRACRACL1i9LVntMbPATTwdRkxT3pea/u+gPE8Xx+DxrPHlaYt+qNfGvjvtXXsi0dOrKk\ndEXjZg6VMtbP2bMtXUdpEV0/Y2nNxbNf8OP+727xvsXH+N1U/wBEfy9k3nxyb+gCVQAGOWvNivX1\nrMPnGGOXNNfOJ2fSZ6w+dZKeHxDPX8N7R+6L8a+L63KdoZ7q6zvEMpnowdKJ6ywmWUyqvIKM0vQ+\nx+D6rU55+9aKx+TzWa36vbezmDwODYenW+95/Nphj5L6dQBo5wAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAEiAAAEoA\nAAAAAAAAAAAAAEAkEAkRuAkQbgkQAkQAkQAkQAl5T2nx8nEMOT8dNv0l6pwfarHvpcGWPu32/WCr\nYvK4mOem6b9mGKd4Z3idmFdka0y1c892zfpMtLPaNpEV6D2Kj/Eauf6YeweQ9ieuTVz8K/3evbT4\n5NfQBKoAA8FxCvJxrUx/XMvevD8Zry8fz/Haf2RfjTx/6RSOnRMyypHu9kXjowrqVSrvPRnZVl6V\nkK0775MsUjvadn0nT4ow6bFijtSsVfPuFYvpPGtNTy54mfy6vorXDm8l9pEC7JIgBIgBIgBIgBIg\nBIgBIhIAgBIhIAgBIgBIIBIAAhIAhIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJAAAAAAAAAAAAAAA\nAAAAAAAAABAJQkAEAAAAAAAAAAjc3BIjdG4Mkbo5kcwMjdhzHMDPc3V8xzAs3N1fMjmBZubq+Y5g\nWbm6vmOYFm5ur5jmBZubq+Y5gWbm6vmOYFm5ur5jmBZubq+Y5gWbm6vmTzAz3N2HMnmBlu5ftFTx\nOEZJ/DMW/d0t2rxKni8N1FPWkiZ9eS08e7Cy8dGGn6UhZaJljXZGnmc3UT3dPP2cnUT78xCIV6j2\nH/8A9c/6f7vXPI+w8bU1U+vL/d63du5NfUiDcVSIAS8b7RV5eOb/AIqRL2TyXtNX/e2KfXH/AHlF\n+NPH/pr4+2xcxx0hFpY11K7R16KM32ZWz3UaidqSgrc9kcPicWyZJjfw6T+727y3sXh2xarN+K0V\nh6lvPjj3e0ASqAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJQAAAAAkQAkQAkAAAAAAAAAAAAAAA\nEgAAAAAAAAAAAAAAAAAAAAAgAAABKDcAN0bgkY8xzAyRux5kcwM9zdXNkTcFm6OZXzMeYFvMibKu\nZHMC2bo51U2RuC2bom6rc3BZzom6sBZzI52ADPnOdggFnMc6skFnMc6rc3BbznOp3RzAv50c6nml\nHMC/nOf4qOY5wX85zqOc5wbHOc7X5znBsc6edr85zg2ec52vzpi4NjmY5bROG+/bllVzsNTk5dLl\nn0pP8BHmMHWNmzt0aum8obm08vVjfrtnxztR0mXHzTvaZdjVRMTLkZo6yiFen9iZ2pqY/wBP93rN\n3kPY+/LfPX1rE/u9XzN3HfqzdO6vmTuIZ7m7Hc3Bnu8t7TR/vHBP9E/y9Pu837SV31umn+if5Rfi\n/j/01MMb1hjkrtKzBG0bMsmOZY11tOYamr6Und0LUc7XT7u3rJPqL8er9lcPhcFpbzyWm39v7O00\n+FYvA4Zpsc94xxu227jv1IAgAAAAAAAAABKAAAASgASgBIgBIgBIgBIhIAAAAAAAAAAAAAAAAAAC\nUACUJAAAAAAAAAAAABIAAAAAAAAAAAAAAAAAAAAg3AEbomQZbo3YzLGbAz3RNlc3YzcFs2YzdVN2\nM2Bdzom6nmNwW86JurTAMuY3REJ2BB1ZRVMVBhsbSsiqeUFXLucq3lTygp5TlXcpygp5TlXcpygp\n5TlXcqOUFXKjlXcrGYBXysdlswiYBVMdUTCyY6sZBWxlnMMZgGLGZZSwkDdHMiWO4MuY5mEyjcFn\nN1OdVzHMC3nTzqeY5gX85zqOZPMC+Lqdbk20eb/RKOZr8QybaK/XvtH7iZ9aGlp2luzT3fg19NHS\nOjbmPcYX67XH1XSZ9XIzRvMuzrK7zLkZYmYnciunb9lZ5dTk+OP+71cXeP8AZnJ/ip2nf3J/l6iL\n/Fu5L9bMWZczXi6YuIbEWTzKIuyiwLt3nuO25uI4a/hx7/rLuczg8TicvFLbfdpEK6+NPH/phhjo\nstLGkctUWnoxrrU3j1cnWTzZq1jzl1clo5Zcu8c+txR63iP3Tn6pv4+g4o5cVI9IiGe7CJ2iE7t3\nGyN2O6dwSINwSISAlAAlACRAAlAAlACRACRCQAAAAAAAAAASgASISAAAAAAAAAAAAACQAAAAAAAA\nAAAAAASAAAAAAAAAAAAAAAAI
AAAQCAJljuljsCJlhMs9mOwMJYys5TkBVsjZdyHICrZPKt5E8oK4\nqmKrOVOwMIqyirPY2Bjyp2ZbAI2NmSARsbMgEbI2ZAMdjZICNkbMkSCNmOzJEgx2YyzljMAwlhKy\nWEwCuWErJhhMArlhLOWEgxljMpljIImWMyTKJA3N0IBO5vux3NwZbnMx3NwZczT4jf3MdPW27a3a\nfJOq1XNP2KdIRfi+J2trSYfcjeF+Wm1OicVeWIiN9kai8xjY12ORqultnI1Ecsujq79XP1FovWYI\nrTgeq+j8QrWZ+3Mx+r2UXeC0WG2Ti2kiN5mL807eUREvbzbaejefHJv62Iv8WUXa0WTFhVtRdlF2\nrz9WUXBtc7jR9dqc2T1ttHyhvZMvJitb0jdq6XHNcNenWVN3028U99WRj6Kb02be3Tq18/SN2Lpc\n3UdN9nOmZrqKX/DaJ/d0svvTLRzV3jomK6+Pd1vvWJj0ZczT0mXxNJht60hfFnQ4qu3N1cWTEgs3\nTur5k7gz3N2O5uDM3Y7m4MtxBuCQASIASIASAAAAAAACRCQAAAAAAAAEoSAAAAAAAAAAAlAAlCQA\nAAAAAAAAAAASAAAAAAAAAAAAIASgAAAEJAQJQCNkbMgGOyOVnsAw5TlZ7GwMOVPKy2NgY7GzIBGx\nskA2AAAAAAAAAAQkBAEghEskAxYzDPZGwK5hjMLJhjMAqmGEwumrCagomFcw2JqqtUFEsLLrV82F\no7gqljKyYYTGwMZRKUSCAQAboJnaN5Bjkneu0d5W4ccViIiOzHFWbTzNumP1Zarr8eeRMbxDW1Mx\nNO67NbkhzNVnmInqzaOZrL93JyZeV0M1++7S02jvxDWxhxx033tPpC8Z6rrezWjmZyazJG2/u03h\n2vFibTHoqvamiwVwY+nLGzV0+SZ1Mx8G0/45tOhzJ5lXMc3UVXRdlF1HP+iYsDPLPPy49/tz1+Te\npSIr0ho6ak5Ms5J8o2q6NImOrHV7XX488ypzTtHXo0s9t6zG7c1G1qz6ubeZiZ3UatXJG3yauSO7\ncvMTEx5tPLb3prPRMVr0HB8vicNxf0+7+kt+LOJwTJyY/Bnz3tH93X36N58cWvq6LSyiyndMSlC7\nmZcymLJiwLosmJVRLKLAtiU7q4lMSCzc3YxJuDMRuAlKAEgAAAlAkAAAAAABKAEgAAAAAJAAAAAA\nAAAAAAAEgAAAAAAAAAAAAAkAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAhIAAACAAAASgAAAAAAEAAAA\nhGzJAImGMwzQDDZjNVuyNgUTVhNGxysZqDVmiu1G5NN2M4waM0+DCaN2cbGcQNGaMZq3JxMJxA1J\nqx2bU4kU09slorWNwa20z02RXHbJbl26QvtFovbHWkxEdJt5y2MOHlr2U1W3jx+1hiw8vSO63lmI\nXRTaEWmtY6snRHO1VpmJ+DjavpSZl2s8b7y4HFcnh0n0gha5ebJN55KRM2mdoiPN6fh+kpwXh0Wy\nRHj5Otp/s5Ps1p62y31+em9aTMYt/OfVfxTiPjZ52naI7fBrI5t66xz5+a1rW7yx0eSL6iZjtEOX\nqNbSletom3lENjh2fbHzbbWt3iVozruc+5ztWubf4M4ybpQ2Oboyrva0Vjza8WdDR4OkXt3n9ldX\nkaePP9VtYqctYhdvt5oivTeCZ2YOxXk6ubqMfV0b9mrljfqlFcq88k7z2U5axeItDa1OPessuC8P\nya7XRWYnwqdbT/ZMilvIu4dpslNdixXja8Y5tt85djZdbDWnGOesRtXFtuw6T27No5Kx2OrKYQlC\nExKJgBnEpiyvdlEgsizKLKollFgWxLKJVRLKJBbEp3VxLKJBnuMWQJEbpBIAAAJAAAABIAAAAAAA\nlAJAAAAAAAAAAAAAASAAAAAAAAAAAAAJAAAABAJABAlAAAAAAAAAAAAAAAAAAAAAAAAIAAAAAAAA\nAAABAJQAAAAgAABAAI2EoBGyJhkgGPKxmqxAKpownHC+YRMdN5BrTj67R3bOn01o7p01Iv71u89o\nb9a7LfBTfS1vWI2jf12VfQPSW8KX2mas+NC2iv6xMNfJpMnLtEbuuxtMRCtzF55NR5rPps1N/ctP\ny6uHreE6nXZ4pak48X3rT06fB7fNeI33cbX6mI32R/MWu7XF116aDSRhxbRERs8f499bkyZeeKae\nkzE2mdon81/tfxDLGOunwbzlzbx08oaHBvZHJlx48mrvaa94pu04y617576rNGLRRM0397JEd/lu\n9Dw/S3x4qxffo6mm4NjwUiKY4iI9Ib1dHFY6QIaNabbrYrLfrpJtaK1rMzPZb/s+05IpP59OyLeJ\nk7eNfRaOc1ue32I7fGXYpi5Y77M8OGMeOKxHSFsU3Y29deZMzirl6dlVvhLatCjJHeYQv1rXnps1\n8k9/VsW6qLVmZIi1rzitlvFKRvaZ2h6TSaenC9FFY+3brM+sqeG8Prp4+kZ+lvuxPkr1mqm95nfp\nDXM459676a2q1dsV7XietvNno78+CJn1cjX6mOeIm0bR33dfRU5NJjidt9t5afjG/V6JZ7I2QMNh\nnyo2BhsMuVG3wAhMSbbQRAMolnE+iuGUSCyJZRKuGUSCyJZK4llEgyZMYTuCUsYSCQASISAAAlCQ\nAAAAAAEoASCASAAAAAAAAAAAAlACRACQAAAAAAAAAEgCEoASCAAAAAAAAAAAAAAAAAAAAAAABAAA\nAAAAAAAISAIAAAAAAQAAACASgAAAQJAQAAhIDHZhln3do7z0WS18mWsajHjmes7pg3dNi5aRMNqO\nyvDHTpPRaigHZhN4hHRlaVN59JY3zRENLUavaO+yq0iNVlitJ6vNcR1MVi0zO0era1/Ea0rPvbz5\nPM5MWp45qvo2GZrhmfrsnpHpHzTCseEcM/2vrr8Q1Eb4qzy44nziPN63HpYiIiI7LNHoqabBTFii\nIpSNohuVxrKtWMEejPwY9G1FFmHB4mWJn7MdfnIM9JpIx15to5pbUaas/a6rqViI7MxPxqX0UT1r\nO3wVzpbR2hviP5i03Y5s6a879FNtHljydhExCv8AMTPJXBnRZbz0iG5ptFjwe/l96zctMVamTJtE\nyTMibu1VrdTzRMR0j0ed4lr64MVpm0RERvMz5NvX62uOJ69XhOKX1HH9bHDtFvNYnfJeOy0Z2ojX\n6jjnEq6fRUmccTvN/J9H0eKcOnx45neaxEbubwHgOHg+milI3vP2resu3Wu0JQmITsmISDHZHKz2\nJgFc1RMLJhGwK9iIZ7MZgEdgmAEwyiWCdwWRLKJVxKYsC2JTuriWUSDNlEsIlMAySx3SCRCQSIAS\nAAACRACQAAAAAAASIASAAAAAAAAAAAAAAACRACRACQASIAAAAAAAAAAAAAAAAAAAAAAAAQCUAAAA\nAAAAAAIAAAAAAAAQAAAAAACBICBICAAEJAQJQCJcLjuS2ny6fPG/LWdpd1o8T0X07SXx/
e7wCdJx\nWa0jmneHQpxPDMdZmJfNtZm49weZrh0/j4o7VtSZ2+Uw0/8A7o49k92vBLc/ntFohFW9PqGXimOI\n6Tu1L8T3eCx6r2t1O3JwvHjifO99v7t/Bwf2l1PXU6rS6eJ8qUm8x+so5TsekzcSjbvs4mt4rzW5\nK2mbT0itesy2cHsvbvqtbmyz5xERWP2jd1tJwrTaONsOKtZ8585+cnDrzmn4Rq+IZObUROHD32n7\nVv8A0ej0uhxaXFGPFSK1j0bkY4jyZRVZVXFGUVWbGwKsk8mObekNrSW3pWf1a2aYjHbm7bNnQ1id\nPW0TvuDdhJEbQABMsLW2R0ZTMQrvfbz2YWzVhpanUxEd0dWkW5c8R5uXxDX1w4pnfr5Q19XxKuOJ\n2neXltVqtVxbV/RdJ715+1bypANfiOu1HENV9C0MTfNeesx2rD1PAeBYuE6aKx72W3W9/WVnBuB4\neF4dqRzZbdb5J72l160WVK02ZxCYhOwI23TsnY2BGxsnYBjsiYZsZBjMMZZSgGEolMsQDdG6NwZ7\npiVe6YkFsSziVMWZRILolMSriWUSCyJTuwhMSDMRCQSI3SAlACRCQAAEoAEoASAAAAAAAAACUACR\nACQAAAAAAAAAAAAASAAAAAAAAAAAAAAAAAAACAAAAAAAAAAAAAABAAAAAAAAAAAAACBKAAAAAAAQ\nJQAAAhICEbJAYTWJ7wx8KvpC0BV4ceieWGewDHlNmWwCNjZICNhIDmcZredBecdpiY69FXCOLW+i\nUiZidukulmxxlx2paN4mNng+K4+I8Hy2yaTfl37TXetoCPfRxfp1qi3F48ofKMvtvxak8s6LDv61\nrZji9rPaLUf5PC+bfttS0q8q3p9W/wBrRMdpUZuKdN99nzvFqPbTVz7nD8OKs+do2/mW3h4D7Xaq\nZnPrtNpqz35aRaYOHY9Zk4pNt9rR+rl6zi+OnS+WN57Rv1lXp/YrNaYtruL6zNPnGO3hxP6O5w/2\nf0HDuun09Yv55Le9afznqcOvO4tBreMTHu30unnva0bWt8on+70nDuE4OHYYx4Kbesz3tPrMuhGO\nIjpDOKrK9YVpsyiGUQnYGOyUgI2SlAIEmwMWMs9kTAMJYzDOYRMArmGErZhhMArlHmzmGMwDE3Ts\nbAbs4swj5pgFkSziVcM4BZEsolXDKAZwyhjCYBkACQhIAAAAAAAJAAAAAAAAAAAAAAAAAAAShIAA\nAAAAAAJAAAAAAAAAAAAAABAJEAAAAAAAAAAAAAAAIEoBKAAAAAAAAAAAAAAABAlAAAAAAAIAAAAA\nBAkBAkBAkBAlACEgMZjdjbFW8bWrEx8YWANb6Fp+bfwab+vLDKMFK9qxH5L0bAr8OPRPKz2AY7J2\nSbAjYZAI2E7AIEgIEgIEgMdkSy2NgY7MdlmyNoBXsxmFuyNgVTVjNV3KjlBRNTlXTVHKCrlIqt5T\nlBhEMohlFerLlBjEMohMVTEARDKCITsAk2AEgAAAkAAAAAAAAAAAAAAAAAAAAAAAASAAAAAAAAD/\n2Q==`;\n", "/**\n * Warmup algorithm that uses embedded images to excercise loaded models for faster future inference\n */\n\nimport { log, now, mergeDeep } from './util/util';\nimport * as sample from './sample';\nimport * as tf from '../dist/tfjs.esm.js';\nimport * as image from './image/image';\nimport type { Config } from './config';\nimport type { Result } from './result';\nimport { env } from './util/env';\n\nasync function warmupBitmap(instance) {\n const b64toBlob = (base64: string, type = 'application/octet-stream') => fetch(`data:${type};base64,${base64}`).then((res) => res.blob());\n let blob;\n let res;\n switch (instance.config.warmup) {\n case 'face': blob = await b64toBlob(sample.face); break;\n case 'body':\n case 'full': blob = await b64toBlob(sample.body); break;\n default: blob = null;\n }\n if (blob) {\n const bitmap = await createImageBitmap(blob);\n res = await instance.detect(bitmap, instance.config);\n bitmap.close();\n }\n return res;\n}\n\nasync function warmupCanvas(instance) {\n return new Promise((resolve) => {\n let src;\n // let size = 0;\n switch (instance.config.warmup) {\n case 'face':\n // size = 256;\n src = 'data:image/jpeg;base64,' + sample.face;\n break;\n case 'full':\n case 'body':\n // size = 1200;\n src = 'data:image/jpeg;base64,' + sample.body;\n break;\n default:\n src = null;\n }\n // src = encodeURI('../assets/human-sample-upper.jpg');\n let img;\n if (typeof Image !== 'undefined') img = new Image();\n // @ts-ignore env.image is an external monkey-patch\n else if (env.Image) img = new env.Image();\n img.onload = async () => {\n const canvas = image.canvas(img.naturalWidth, img.naturalHeight);\n if (!canvas) {\n log('Warmup: Canvas not found');\n resolve({});\n } else {\n const ctx = canvas.getContext('2d');\n if (ctx) ctx.drawImage(img, 0, 0);\n // const data = ctx?.getImageData(0, 0, canvas.height, canvas.width);\n const tensor = await instance.image(canvas);\n const res = await 
instance.detect(tensor.tensor, instance.config);\n resolve(res);\n }\n };\n if (src) img.src = src;\n else resolve(null);\n });\n}\n\nasync function warmupNode(instance) {\n const atob = (str: string) => Buffer.from(str, 'base64');\n let img;\n if (instance.config.warmup === 'face') img = atob(sample.face);\n if (instance.config.warmup === 'body' || instance.config.warmup === 'full') img = atob(sample.body);\n if (!img) return null;\n let res;\n if (typeof tf['node'] !== 'undefined') {\n const data = tf['node'].decodeJpeg(img);\n const expanded = data.expandDims(0);\n instance.tf.dispose(data);\n // log('Input:', expanded);\n res = await instance.detect(expanded, instance.config);\n instance.tf.dispose(expanded);\n } else {\n if (instance.config.debug) log('Warmup tfjs-node not loaded');\n /*\n const input = await canvasJS.loadImage(img);\n const canvas = canvasJS.createCanvas(input.width, input.height);\n const ctx = canvas.getContext('2d');\n ctx.drawImage(img, 0, 0, input.width, input.height);\n res = await instance.detect(input, instance.config);\n */\n }\n return res;\n}\n\n/** Warmup method pre-initializes all configured models for faster inference\n * - can take significant time on startup\n * - only used for `webgl` and `humangl` backends\n * @param userConfig?: Config\n*/\nexport async function warmup(instance, userConfig?: Partial<Config>): Promise<Result | { error: string }> {\n const t0 = now();\n instance.state = 'warmup';\n if (userConfig) instance.config = mergeDeep(instance.config, userConfig) as Config;\n if (!instance.config.warmup || instance.config.warmup === 'none') return { error: 'null' };\n let res;\n return new Promise(async (resolve) => {\n if (typeof createImageBitmap === 'function') res = await warmupBitmap(instance);\n else if (typeof Image !== 'undefined' || env.Canvas !== undefined) res = await warmupCanvas(instance);\n else res = await warmupNode(instance);\n const t1 = now();\n if (instance.config.debug) log('Warmup', instance.config.warmup, Math.round(t1 - t0), 'ms');\n instance.emit('warmup');\n resolve(res);\n });\n}\n", "/**\n * Human main module\n * @default Human Library\n * @summary \n * @author \n * @copyright \n * @license MIT\n */\n\n// module imports\nimport { log, now, mergeDeep, validate } from './util/util';\nimport { defaults } from './config';\nimport { env, Env } from './util/env';\nimport * as tf from '../dist/tfjs.esm.js';\nimport * as app from '../package.json';\nimport * as backend from './tfjs/backend';\nimport * as blazepose from './body/blazepose';\nimport * as centernet from './object/centernet';\nimport * as draw from './util/draw';\nimport * as efficientpose from './body/efficientpose';\nimport * as face from './face/face';\nimport * as facemesh from './face/facemesh';\nimport * as faceres from './face/faceres';\nimport * as gesture from './gesture/gesture';\nimport * as handpose from './hand/handpose';\nimport * as handtrack from './hand/handtrack';\nimport * as humangl from './tfjs/humangl';\nimport * as image from './image/image';\nimport * as interpolate from './util/interpolate';\nimport * as match from './face/match';\nimport * as models from './models';\nimport * as movenet from './body/movenet';\nimport * as nanodet from './object/nanodet';\nimport * as persons from './util/persons';\nimport * as posenet from './body/posenet';\nimport * as segmentation from './segmentation/segmentation';\nimport * as warmups from './warmup';\n// type definitions\nimport type { Input, Tensor, DrawOptions, Config, Result, FaceResult, HandResult, BodyResult, ObjectResult, 
// === src/human.ts ===

/**
 * Human main module
 * @default Human Library
 * @summary
 * @author
 * @copyright
 * @license MIT
 */

// module imports
import { log, now, mergeDeep, validate } from './util/util';
import { defaults } from './config';
import { env, Env } from './util/env';
import * as tf from '../dist/tfjs.esm.js';
import * as app from '../package.json';
import * as backend from './tfjs/backend';
import * as blazepose from './body/blazepose';
import * as centernet from './object/centernet';
import * as draw from './util/draw';
import * as efficientpose from './body/efficientpose';
import * as face from './face/face';
import * as facemesh from './face/facemesh';
import * as faceres from './face/faceres';
import * as gesture from './gesture/gesture';
import * as handpose from './hand/handpose';
import * as handtrack from './hand/handtrack';
import * as humangl from './tfjs/humangl';
import * as image from './image/image';
import * as interpolate from './util/interpolate';
import * as match from './face/match';
import * as models from './models';
import * as movenet from './body/movenet';
import * as nanodet from './object/nanodet';
import * as persons from './util/persons';
import * as posenet from './body/posenet';
import * as segmentation from './segmentation/segmentation';
import * as warmups from './warmup';
// type definitions
import type { Input, Tensor, DrawOptions, Config, Result, FaceResult, HandResult, BodyResult, ObjectResult, GestureResult, PersonResult } from './exports';
// type exports
export * from './exports';

/** Instance of TensorFlow/JS used by Human
 * - Can be TFJS that is bundled with `Human` or a manually imported TFJS library
 * @external [API](https://js.tensorflow.org/api/latest/)
 */
export type TensorFlow = typeof tf;

/** Error message */
export type Error = {
  /** @property error message */
  error: string,
};

/** **Human** library main class
 *
 * All methods and properties are available only as members of Human class
 *
 * - Configuration object definition: {@link Config}
 * - Results object definition: {@link Result}
 * - Possible inputs: {@link Input}
 *
 * @param userConfig: {@link Config}
 * @returns instance of {@link Human}
 */
export class Human {
  /** Current version of Human library in *semver* format */
  version: string;

  /** Current configuration
   * - Defaults: [config](https://github.com/vladmandic/human/blob/main/src/config.ts#L250)
   */
  config: Config;

  /** Last known result of detect run
   * - Can be accessed anytime after initial detection
   */
  result: Result;

  /** Current state of Human library
   * - Can be polled to determine operations that are currently executed
   * - Progresses through: 'config', 'check', 'backend', 'load', 'run:<model>', 'idle'
   */
  state: string;

  /** Currently processed image tensor and canvas */
  process: { tensor: Tensor | null, canvas: OffscreenCanvas | HTMLCanvasElement | null };

  /** Instance of TensorFlow/JS used by Human
   * - Can be embedded or externally provided
   * @internal
   *
   * [TFJS API](https://js.tensorflow.org/api/latest/)
   */
  tf: TensorFlow;

  /** Object containing environment information used for diagnostics */
  env: Env;

  /** Draw helper classes that can draw detected objects on canvas using specified draw options
   * @property options global settings for all draw operations, can be overridden for each draw method {@link DrawOptions}
   */
  draw: { canvas: typeof draw.canvas, face: typeof draw.face, body: typeof draw.body, hand: typeof draw.hand, gesture: typeof draw.gesture, object: typeof draw.object, person: typeof draw.person, all: typeof draw.all, options: DrawOptions };

  /** Currently loaded models
   * @internal
   * {@link Models}
   */
  models: models.Models;

  /** Container for events dispatched by Human
   * {@type} EventTarget
   * Possible events:
   * - `create`: triggered when Human object is instantiated
   * - `load`: triggered when models are loaded (explicitly or on-demand)
   * - `image`: triggered when input image is processed
   * - `result`: triggered when detection is complete
   * - `warmup`: triggered when warmup is complete
   * - `error`: triggered on some errors
   */
  events: EventTarget | undefined;
  /** Reference face triangulation array of 468 points, used for triangle references between points */
  faceTriangulation: typeof facemesh.triangulation;
  /** Reference UV map of 468 values, used for 3D mapping of the face mesh */
  faceUVMap: typeof facemesh.uvmap;
  /** Performance object that contains values for all recently performed operations */
  performance: Record<string, number>; // perf members are dynamically defined as needed
  #numTensors: number;
  #analyzeMemoryLeaks: boolean;
  #checkSanity: boolean;
  /** WebGL debug info */
  gl: Record<string, unknown>;
  // definition end
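  // Usage sketch (editorial addition): the `events` EventTarget can be observed with
  // standard DOM listener APIs; event names follow the list documented above.
  //
  //   const human = new Human();
  //   human.events?.addEventListener('result', () => {
  //     console.log('detection complete:', human.result.performance); // last result is kept on the instance
  //   });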
  /** Constructor for **Human** library that is further used for all operations
   *
   * @param {Config} userConfig
   * @returns {Human}
   */
  constructor(userConfig?: Partial<Config>) {
    this.env = env;
    defaults.wasmPath = tf.version_core.includes('-') // custom build or official build
      ? 'https://vladmandic.github.io/tfjs/dist/'
      : `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tf.version_core}/dist/`;
    defaults.modelBasePath = env.browser ? '../models/' : 'file://models/';
    defaults.backend = env.browser ? 'humangl' : 'tensorflow';
    this.version = app.version; // expose version property on instance of class
    Object.defineProperty(this, 'version', { value: app.version }); // expose version property directly on class itself
    this.config = JSON.parse(JSON.stringify(defaults));
    Object.seal(this.config);
    if (userConfig) this.config = mergeDeep(this.config, userConfig);
    this.tf = tf;
    this.state = 'idle';
    this.#numTensors = 0;
    this.#analyzeMemoryLeaks = false;
    this.#checkSanity = false;
    this.performance = {};
    this.events = (typeof EventTarget !== 'undefined') ? new EventTarget() : undefined;
    // object that contains all initialized models
    this.models = new models.Models();
    // reexport draw methods
    this.draw = {
      options: draw.options as DrawOptions,
      canvas: (input: HTMLCanvasElement | OffscreenCanvas | HTMLImageElement | HTMLMediaElement | HTMLVideoElement, output: HTMLCanvasElement) => draw.canvas(input, output),
      face: (output: HTMLCanvasElement | OffscreenCanvas, result: FaceResult[], options?: Partial<DrawOptions>) => draw.face(output, result, options),
      body: (output: HTMLCanvasElement | OffscreenCanvas, result: BodyResult[], options?: Partial<DrawOptions>) => draw.body(output, result, options),
      hand: (output: HTMLCanvasElement | OffscreenCanvas, result: HandResult[], options?: Partial<DrawOptions>) => draw.hand(output, result, options),
      gesture: (output: HTMLCanvasElement | OffscreenCanvas, result: GestureResult[], options?: Partial<DrawOptions>) => draw.gesture(output, result, options),
      object: (output: HTMLCanvasElement | OffscreenCanvas, result: ObjectResult[], options?: Partial<DrawOptions>) => draw.object(output, result, options),
      person: (output: HTMLCanvasElement | OffscreenCanvas, result: PersonResult[], options?: Partial<DrawOptions>) => draw.person(output, result, options),
      all: (output: HTMLCanvasElement | OffscreenCanvas, result: Result, options?: Partial<DrawOptions>) => draw.all(output, result, options),
    };
    this.result = { face: [], body: [], hand: [], gesture: [], object: [], performance: {}, timestamp: 0, persons: [] };
    // export access to image processing
    // @ts-ignore eslint-typescript cannot correctly infer type in anonymous function
    this.process = { tensor: null, canvas: null };
    // export raw access to underlying models
    this.faceTriangulation = facemesh.triangulation;
    this.faceUVMap = facemesh.uvmap;
    // set gl info
    this.gl = humangl.config;
    // include platform info
    this.emit('create');
  }

  // helper function: measure tensor leak
  /** @hidden */
  analyze = (...msg: string[]) => {
    if (!this.#analyzeMemoryLeaks) return;
    const currentTensors = this.tf.engine().state.numTensors;
    const previousTensors = this.#numTensors;
    this.#numTensors = currentTensors;
    const leaked = currentTensors - previousTensors;
    if (leaked !== 0) log(...msg, leaked);
  };

  // quick sanity check on inputs
  /** @hidden */
  #sanity = (input: Input): null | string => {
    if (!this.#checkSanity) return null;
    if (!input) return 'input is not defined';
    if (this.env.node && !(input instanceof tf.Tensor)) return 'input must be a tensor';
    try {
      this.tf.getBackend();
    } catch {
      return 'backend not loaded';
    }
    return null;
  };
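  // Usage sketch (editorial addition): minimal instantiation in a browser; the backend
  // and model path values are assumptions and depend on the deployment.
  //
  //   const human = new Human({ backend: 'humangl', modelBasePath: '../models/' });
  //   const video = document.getElementById('video') as HTMLVideoElement;
  //   const result = await human.detect(video); // runs all configured models on one frame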
  /** Reset configuration to default values */
  reset(): void {
    const currentBackend = this.config.backend; // save backend
    this.config = JSON.parse(JSON.stringify(defaults));
    this.config.backend = currentBackend;
  }

  /** Validate current configuration schema */
  validate(userConfig?: Partial<Config>) {
    return validate(defaults, userConfig || this.config);
  }

  /** Exports face matching methods */
  public similarity = match.similarity;
  public distance = match.distance;
  public match = match.match;

  /** Utility wrapper for performance.now() */
  now(): number {
    return now();
  }

  /** Process input and return both canvas and tensor
   *
   * @param input: {@link Input}
   * @param {boolean} getTensor should image processing also return tensor or just canvas
   * @returns { tensor, canvas }
   */
  image(input: Input, getTensor: boolean = true) {
    return image.process(input, this.config, getTensor);
  }

  /** Segmentation method takes any input and returns processed canvas with body segmentation
   * - Segmentation is not triggered as part of detect process
   *
   * @param input: {@link Input}
   * @param background?: {@link Input}
   * - Optional parameter background is used to fill the background with specific input
   * @returns {object}
   * - `data` as raw data array with per-pixel segmentation values
   * - `canvas` as canvas which is input image filtered with segmentation data and optionally merged with background image; canvas alpha values are set to segmentation values for easy merging
   * - `alpha` as grayscale canvas that represents segmentation alpha values
   */
  async segmentation(input: Input, background?: Input): Promise<{ data: number[], canvas: HTMLCanvasElement | OffscreenCanvas | null, alpha: HTMLCanvasElement | OffscreenCanvas | null }> {
    return segmentation.process(input, background, this.config);
  }
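  // Usage sketch (editorial addition): replace a person's background with another image;
  // `personImage` and `backgroundImage` are hypothetical inputs supplied by the caller.
  //
  //   const human = new Human({ segmentation: { enabled: true } });
  //   const { canvas } = await human.segmentation(personImage, backgroundImage);
  //   if (canvas) document.body.appendChild(canvas as HTMLCanvasElement);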
  /** Enhance method performs additional enhancements to face image previously detected for further processing
   *
   * @param input: Tensor as provided in human.result.face[n].tensor
   * @returns Tensor
   */
  // eslint-disable-next-line class-methods-use-this
  enhance(input: Tensor): Tensor | null {
    return faceres.enhance(input);
  }

  /** Explicit backend initialization
   * - Normally done implicitly during initial load phase
   * - Call to explicitly register and initialize TFJS backend without any other operations
   * - Use when changing backend during runtime
   *
   * @returns {void}
   */
  async init(): Promise<void> {
    await backend.check(this, true);
    await this.tf.ready();
  }

  /** Load method preloads all configured models on-demand
   * - Not explicitly required as any required model is loaded implicitly on its first run
   *
   * @param userConfig?: {@link Config}
   * @return Promise<void>
   */
  async load(userConfig?: Partial<Config>): Promise<void> {
    this.state = 'load';
    const timeStamp = now();
    const count = Object.values(this.models).filter((model) => model).length;
    if (userConfig) this.config = mergeDeep(this.config, userConfig) as Config;

    if (this.env.initial) { // print version info on first run and check for correct backend setup
      if (this.config.debug) log(`version: ${this.version}`);
      if (this.config.debug) log(`tfjs version: ${this.tf.version_core}`);
      if (!await backend.check(this)) log('error: backend check failed');
      await tf.ready();
      if (this.env.browser) {
        if (this.config.debug) log('configuration:', this.config);
        if (this.config.debug) log('environment:', this.env);
        if (this.config.debug) log('tf flags:', this.tf.ENV['flags']);
      }
    }

    await models.load(this); // actually loads models
    if (this.env.initial && this.config.debug) log('tf engine state:', this.tf.engine().state.numBytes, 'bytes', this.tf.engine().state.numTensors, 'tensors'); // print memory stats on first run
    this.env.initial = false;

    const loaded = Object.values(this.models).filter((model) => model).length;
    if (loaded !== count) { // number of loaded models changed
      await models.validate(this); // validate kernel ops used by model against current backend
      this.emit('load');
    }

    const current = Math.trunc(now() - timeStamp);
    if (current > (this.performance.loadModels as number || 0)) this.performance.loadModels = this.env.perfadd ? (this.performance.loadModels || 0) + current : current;
  }

  // emit event
  /** @hidden */
  emit = (event: string) => {
    if (this.events && this.events.dispatchEvent) this.events?.dispatchEvent(new Event(event));
  };

  /** Runs interpolation using last known result and returns smoothed result
   * Interpolation is based on time since last known result so it can be called independently
   *
   * @param result?: {@link Result} optional use specific result set to run interpolation on
   * @returns result: {@link Result}
   */
  next(result: Result = this.result): Result {
    return interpolate.calc(result, this.config) as Result;
  }

  /** Warmup method pre-initializes all configured models for faster inference
   * - can take significant time on startup
   * - only used for `webgl` and `humangl` backends
   * @param userConfig?: {@link Config}
   * @returns result: {@link Result}
   */
  async warmup(userConfig?: Partial<Config>) {
    const t0 = now();
    const res = await warmups.warmup(this, userConfig);
    const t1 = now();
    this.performance.warmup = Math.trunc(t1 - t0);
    return res;
  }

  /** Run detect with tensorflow profiling
   * - result object will contain total execution time information for top-20 kernels
   * - actual detection object can be accessed via `human.result`
   */
  async profile(input: Input, userConfig?: Partial<Config>): Promise<Record<string, number>> {
    const profile = await this.tf.profile(() => this.detect(input, userConfig));
    const kernels = {};
    for (const kernel of profile.kernels) { // sum kernel time values per kernel
      if (kernels[kernel.name]) kernels[kernel.name] += kernel.kernelTimeMs;
      else kernels[kernel.name] = kernel.kernelTimeMs;
    }
    const kernelArr: Array<{ name, ms }> = [];
    Object.entries(kernels).forEach((key) => kernelArr.push({ name: key[0], ms: key[1] })); // convert to array
    kernelArr.sort((a, b) => b.ms - a.ms); // sort by kernel time, descending
    kernelArr.length = 20; // crop to top-20
    const res: Record<string, number> = {};
    for (const kernel of kernelArr) res[kernel.name] = kernel.ms; // create perf objects
    return res;
  }

  /** Main detection method
   * - Analyze configuration: {@link Config}
   * - Pre-process input: {@link Input}
   * - Run inference for all configured models
   * - Process and return result: {@link Result}
   *
   * @param input: {@link Input}
   * @param userConfig?: {@link Config}
   * @returns result: {@link Result}
   */
  async detect(input: Input, userConfig?: Partial): Promise